ada-tree.h (DECL_RETURN_VALUE_P): New macro.
[gcc.git] / gcc / ada / gcc-interface / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2016, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "diagnostic.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "toplev.h"
41 #include "output.h"
42 #include "debug.h"
43 #include "convert.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
48
49 #include "ada.h"
50 #include "types.h"
51 #include "atree.h"
52 #include "nlists.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
60 /* If nonzero, pretend we are allocating at global level. */
61 int force_global;
62
63 /* The default alignment of "double" floating-point types, i.e. floating
64 point types whose size is equal to 64 bits, or 0 if this alignment is
65 not specifically capped. */
66 int double_float_alignment;
67
68 /* The default alignment of "double" or larger scalar types, i.e. scalar
69 types whose size is greater or equal to 64 bits, or 0 if this alignment
70 is not specifically capped. */
71 int double_scalar_alignment;
72
73 /* True if floating-point arithmetics may use wider intermediate results. */
74 bool fp_arith_may_widen = true;
75
76 /* Tree nodes for the various types and decls we create. */
77 tree gnat_std_decls[(int) ADT_LAST];
78
79 /* Functions to call for each of the possible raise reasons. */
80 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
81
82 /* Likewise, but with extra info for each of the possible raise reasons. */
83 tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];
84
85 /* Forward declarations for handlers of attributes. */
86 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
93 static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
94 static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
95 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
96 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
97 static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
98 static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);
99
100 /* Fake handler for attributes we don't properly support, typically because
101 they'd require dragging a lot of the common-c front-end circuitry. */
102 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
103
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.
   The table must be terminated by a sentinel entry with a NULL name.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  /* nonnull takes a variable number of arguments, hence max_len == -1.  */
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "always_inline",0, 0,  true,  false, false, handle_always_inline_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg",   1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,           0, 0,  false, false, false, NULL, false }
};
147
148 /* Associates a GNAT tree node to a GCC tree node. It is used in
149 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
150 of `save_gnu_tree' for more info. */
151 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
152
153 #define GET_GNU_TREE(GNAT_ENTITY) \
154 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
155
156 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
157 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
158
159 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
160 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
161
162 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
163 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
164
165 #define GET_DUMMY_NODE(GNAT_ENTITY) \
166 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
167
168 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
169 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
170
171 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
172 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
173
174 /* This variable keeps a table for types for each precision so that we only
175 allocate each of them once. Signed and unsigned types are kept separate.
176
177 Note that these types are only used when fold-const requests something
178 special. Perhaps we should NOT share these types; we'll see how it
179 goes later. */
180 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
181
182 /* Likewise for float types, but record these by mode. */
183 static GTY(()) tree float_types[NUM_MACHINE_MODES];
184
185 /* For each binding contour we allocate a binding_level structure to indicate
186 the binding depth. */
187
188 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
189 /* The binding level containing this one (the enclosing binding level). */
190 struct gnat_binding_level *chain;
191 /* The BLOCK node for this level. */
192 tree block;
193 /* If nonzero, the setjmp buffer that needs to be updated for any
194 variable-sized definition within this context. */
195 tree jmpbuf_decl;
196 };
197
198 /* The binding level currently in effect. */
199 static GTY(()) struct gnat_binding_level *current_binding_level;
200
201 /* A chain of gnat_binding_level structures awaiting reuse. */
202 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
203
204 /* The context to be used for global declarations. */
205 static GTY(()) tree global_context;
206
207 /* An array of global declarations. */
208 static GTY(()) vec<tree, va_gc> *global_decls;
209
210 /* An array of builtin function declarations. */
211 static GTY(()) vec<tree, va_gc> *builtin_decls;
212
213 /* A chain of unused BLOCK nodes. */
214 static GTY((deletable)) tree free_block_chain;
215
216 /* A hash table of padded types. It is modelled on the generic type
217 hash table in tree.c, which must thus be used as a reference. */
218
219 struct GTY((for_user)) pad_type_hash {
220 unsigned long hash;
221 tree type;
222 };
223
224 struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
225 {
226 static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
227 static bool equal (pad_type_hash *a, pad_type_hash *b);
228 static int keep_cache_entry (pad_type_hash *&);
229 };
230
231 static GTY ((cache))
232 hash_table<pad_type_hasher> *pad_type_hash_table;
233
234 static tree merge_sizes (tree, tree, tree, bool, bool);
235 static tree compute_related_constant (tree, tree);
236 static tree split_plus (tree, tree *);
237 static tree float_type_for_precision (int, machine_mode);
238 static tree convert_to_fat_pointer (tree, tree);
239 static unsigned int scale_by_factor_of (tree, unsigned int);
240 static bool potential_alignment_gap (tree, tree, tree);
241
242 /* A linked list used as a queue to defer the initialization of the
243 DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
244 of ..._TYPE nodes. */
245 struct deferred_decl_context_node
246 {
247 tree decl; /* The ..._DECL node to work on. */
248 Entity_Id gnat_scope; /* The corresponding entity's Scope attribute. */
249 int force_global; /* force_global value when pushing DECL. */
250 vec<tree, va_heap, vl_ptr> types; /* A list of ..._TYPE nodes to propagate the
251 context to. */
252 struct deferred_decl_context_node *next; /* The next queue item. */
253 };
254
255 static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;
256
257 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
258 feed it with the elaboration of GNAT_SCOPE. */
259 static struct deferred_decl_context_node *
260 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);
261
262 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
263 feed it with the DECL_CONTEXT computed as part of N as soon as it is
264 computed. */
265 static void add_deferred_type_context (struct deferred_decl_context_node *n,
266 tree type);
267 \f
268 /* Initialize data structures of the utils.c module. */
269
270 void
271 init_gnat_utils (void)
272 {
273 /* Initialize the association of GNAT nodes to GCC trees. */
274 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
275
276 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
277 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
278
279 /* Initialize the hash table of padded types. */
280 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
281 }
282
283 /* Destroy data structures of the utils.c module. */
284
285 void
286 destroy_gnat_utils (void)
287 {
288 /* Destroy the association of GNAT nodes to GCC trees. */
289 ggc_free (associate_gnat_to_gnu);
290 associate_gnat_to_gnu = NULL;
291
292 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
293 ggc_free (dummy_node_table);
294 dummy_node_table = NULL;
295
296 /* Destroy the hash table of padded types. */
297 pad_type_hash_table->empty ();
298 pad_type_hash_table = NULL;
299 }
300 \f
301 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
302 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
303 If NO_CHECK is true, the latter check is suppressed.
304
305 If GNU_DECL is zero, reset a previous association. */
306
307 void
308 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
309 {
310 /* Check that GNAT_ENTITY is not already defined and that it is being set
311 to something which is a decl. If that is not the case, this usually
312 means GNAT_ENTITY is defined twice, but occasionally is due to some
313 Gigi problem. */
314 gcc_assert (!(gnu_decl
315 && (PRESENT_GNU_TREE (gnat_entity)
316 || (!no_check && !DECL_P (gnu_decl)))));
317
318 SET_GNU_TREE (gnat_entity, gnu_decl);
319 }
320
321 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
322 that was associated with it. If there is no such tree node, abort.
323
324 In some cases, such as delayed elaboration or expressions that need to
325 be elaborated only once, GNAT_ENTITY is really not an entity. */
326
327 tree
328 get_gnu_tree (Entity_Id gnat_entity)
329 {
330 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
331 return GET_GNU_TREE (gnat_entity);
332 }
333
334 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
335
336 bool
337 present_gnu_tree (Entity_Id gnat_entity)
338 {
339 return PRESENT_GNU_TREE (gnat_entity);
340 }
341 \f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use it; otherwise make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
			? tree_code_for_record_type (gnat_equiv)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  /* Record the dummy so subsequent requests for GNAT_EQUIV reuse it.  */
  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  return gnu_type;
}
375
376 /* Return the dummy type that was made for GNAT_TYPE, if any. */
377
378 tree
379 get_dummy_type (Entity_Id gnat_type)
380 {
381 return GET_DUMMY_NODE (gnat_type);
382 }
383
/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  /* Dummy template (bounds) record and a pointer to it; "XUB" is the GNAT
     encoding suffix for bounds templates.  */
  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  /* Dummy array type ("XUA") and a pointer to it.  ENUMERAL_TYPE is used
     here as a placeholder node, as for other dummy types in this file.  */
  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  /* The fat pointer has two fields: the pointer to the array data and the
     pointer to the bounds template.  */
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  /* Dummy object record type ("XUT"), i.e. the template plus the data.  */
  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  /* Attach the fat pointer and object record to the designated type.  */
  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
426 \f
427 /* Return true if we are in the global binding level. */
428
429 bool
430 global_bindings_p (void)
431 {
432 return force_global || !current_function_decl;
433 }
434
435 /* Enter a new binding level. */
436
437 void
438 gnat_pushlevel (void)
439 {
440 struct gnat_binding_level *newlevel = NULL;
441
442 /* Reuse a struct for this binding level, if there is one. */
443 if (free_binding_level)
444 {
445 newlevel = free_binding_level;
446 free_binding_level = free_binding_level->chain;
447 }
448 else
449 newlevel = ggc_alloc<gnat_binding_level> ();
450
451 /* Use a free BLOCK, if any; otherwise, allocate one. */
452 if (free_block_chain)
453 {
454 newlevel->block = free_block_chain;
455 free_block_chain = BLOCK_CHAIN (free_block_chain);
456 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
457 }
458 else
459 newlevel->block = make_node (BLOCK);
460
461 /* Point the BLOCK we just made to its parent. */
462 if (current_binding_level)
463 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
464
465 BLOCK_VARS (newlevel->block) = NULL_TREE;
466 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
467 TREE_USED (newlevel->block) = 1;
468
469 /* Add this level to the front of the chain (stack) of active levels. */
470 newlevel->chain = current_binding_level;
471 newlevel->jmpbuf_decl = NULL_TREE;
472 current_binding_level = newlevel;
473 }
474
475 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
476 and point FNDECL to this BLOCK. */
477
478 void
479 set_current_block_context (tree fndecl)
480 {
481 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
482 DECL_INITIAL (fndecl) = current_binding_level->block;
483 set_block_for_group (current_binding_level->block);
484 }
485
/* Set the jmpbuf_decl for the current binding level to DECL, the setjmp
   buffer that needs updating for variable-sized definitions at this level.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}
493
/* Get the jmpbuf_decl, if any, for the current binding level.  Returns
   NULL_TREE when none has been set with set_block_jmpbuf_decl.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}
501
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* The variable and subblock chains were built in reverse order; put them
     back in declaration order before deciding the fate of BLOCK.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      /* Splice our subblocks onto the parent's list, then recycle BLOCK
	 by putting it on the free chain.  */
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      /* Keep BLOCK: chain it as the first subblock of the parent BLOCK.  */
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
539
540 /* Exit a binding level and discard the associated BLOCK. */
541
542 void
543 gnat_zaplevel (void)
544 {
545 struct gnat_binding_level *level = current_binding_level;
546 tree block = level->block;
547
548 BLOCK_CHAIN (block) = free_block_chain;
549 free_block_chain = block;
550
551 /* Free this binding structure. */
552 current_binding_level = level->chain;
553 level->chain = free_binding_level;
554 free_binding_level = level;
555 }
556 \f
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  /* Walk the chain of parallel types hanging off the stub decl.  */
  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
	 Some parallel types seem to be present in multiple parallel type
	 chains, so don't mess with their context if they already have one.  */
      if (!TYPE_CONTEXT (parallel_type))
	{
	  if (TYPE_STUB_DECL (parallel_type))
	    DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
	  TYPE_CONTEXT (parallel_type) = context;
	}

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}
583
/* Return the innermost scope, starting at GNAT_NODE, we are interested in
   for the debug info, or Empty if there is no such scope.  If not NULL, set
   IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  /* Only defining identifiers and operator symbols carry a Scope we can
     climb; for anything else there is no debug scope of interest.  */
  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  /* Walk outward through the scopes until one relevant to the debug info
     (a subprogram or a record type) is found.  */
  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
	{
	case E_Function:
	case E_Procedure:
	  if (Present (Protected_Body_Subprogram (gnat_entity)))
	    gnat_entity = Protected_Body_Subprogram (gnat_entity);

	  /* If the scope is a subprogram, then just rely on
	     current_function_decl, so that we don't have to defer
	     anything.  This is needed because other places rely on the
	     validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
	  if (is_subprogram)
	    *is_subprogram = true;
	  return gnat_entity;

	case E_Record_Type:
	case E_Record_Subtype:
	  return gnat_entity;

	default:
	  /* By default, we are not interested in this particular scope: go to
	     the outer one.  */
	  break;
	}

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}
634
635 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
636 of N otherwise. */
637
638 static void
639 defer_or_set_type_context (tree type, tree context,
640 struct deferred_decl_context_node *n)
641 {
642 if (n)
643 add_deferred_type_context (n, type);
644 else
645 gnat_set_type_context (type, context);
646 }
647
648 /* Return global_context, but create it first if need be. */
649
650 static tree
651 get_global_context (void)
652 {
653 if (!global_context)
654 {
655 global_context = build_translation_unit_decl (NULL_TREE);
656 debug_hooks->register_main_translation_unit (global_context);
657 }
658
659 return global_context;
660 }
661
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fallback to the current_function_decl if
	 the GNAT scope reached the global scope, if it reached a subprogram
	 or the declaration is a subprogram or a variable (for them we skip
	 intermediate context types because the subprogram body elaboration
	 machinery and the inliner both expect a subprogram context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
	= get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl && force_global == 0)
	context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help in determining the context, use the
     global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  /* Disable warnings when there is no GNAT node or when they are turned
     off on it.  */
  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
	{
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    vec_safe_push (builtin_decls, decl);
	}
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
	 to generate a typedef in DWARF for them and make sure it is preserved,
	 unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
	  && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
	      || DECL_ARTIFICIAL (decl)))
	;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
	 generate the typedef in DWARF.  Also do that for fat pointer types
	 because, even though they are tagged types in the C sense, they are
	 still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
	       && (TREE_CODE (t) == ARRAY_TYPE
		   || TREE_CODE (t) == POINTER_TYPE
		   || TYPE_IS_FAT_POINTER_P (t)))
	{
	  tree tt;
	  /* ??? Copy and original type are not supposed to be variant but we
	     really need a variant for the placeholder machinery to work.  */
	  if (TYPE_IS_FAT_POINTER_P (t))
	    tt = build_variant_type_copy (t);
	  else
	    {
	      /* TYPE_NEXT_PTR_TO is a chain of main variants.  */
	      tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
	      tt = build_qualified_type (tt, TYPE_QUALS (t));
	    }
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt,
				     DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (TYPE_NAME (t)
	      && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	      && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  /* Array types need to have a name so that they can be related to
	     their GNAT encodings.  */
	  if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
	    TYPE_NAME (t) = DECL_NAME (decl);
	  /* Clear T so the variant-propagation loop below is skipped.  */
	  t = NULL_TREE;
	}
      else if (TYPE_NAME (t)
	       && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	       && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
	 qualifiers machinery to work properly (see check_qualified_type).
	 Also propagate the context to them.  Note that it will be propagated
	 to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  /* ??? Because of the previous kludge, we can have variants of fat
	     pointer types with different names.  */
	  if (!(TYPE_IS_FAT_POINTER_P (t)
		&& TYPE_NAME (t)
		&& TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}
835 \f
836 /* Create a record type that contains a SIZE bytes long field of TYPE with a
837 starting bit position so that it is aligned to ALIGN bits, and leaving at
838 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
839 record is guaranteed to get. GNAT_NODE is used for the position of the
840 associated TYPE_DECL. */
841
tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  /* The record's own address, as a sizetype byte offset; the PLACEHOLDER_EXPR
     will be substituted with the actual object later.  */
  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
                {  +------------+-------------+-----------------+
                   |<-- room -->|<- voffset ->|<---- size ----->|
                   o            o
                   |            |
                   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  /* The size must account for the worst-case voffset (ALIGN bits) plus the
     ROOM bytes left free before the field.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
928
929 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
930 as the field type of a packed record if IN_RECORD is true, or as the
931 component type of a packed array if IN_RECORD is false. See if we can
932 rewrite it either as a type that has a non-BLKmode, which we can pack
933 tighter in the packed record case, or as a smaller type. If so, return
934 the new type. If not, return the original type. */
935
936 tree
937 make_packable_type (tree type, bool in_record)
938 {
939 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
940 unsigned HOST_WIDE_INT new_size;
941 tree new_type, old_field, field_list = NULL_TREE;
942 unsigned int align;
943
944 /* No point in doing anything if the size is zero. */
945 if (size == 0)
946 return type;
947
948 new_type = make_node (TREE_CODE (type));
949
950 /* Copy the name and flags from the old type to that of the new.
951 Note that we rely on the pointer equality created here for
952 TYPE_NAME to look through conversions in various places. */
953 TYPE_NAME (new_type) = TYPE_NAME (type);
954 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
955 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
956 TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
957 if (TREE_CODE (type) == RECORD_TYPE)
958 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
959
960 /* If we are in a record and have a small size, set the alignment to
961 try for an integral mode. Otherwise set it to try for a smaller
962 type with BLKmode. */
963 if (in_record && size <= MAX_FIXED_MODE_SIZE)
964 {
965 align = ceil_pow2 (size);
966 TYPE_ALIGN (new_type) = align;
967 new_size = (size + align - 1) & -align;
968 }
969 else
970 {
971 unsigned HOST_WIDE_INT align;
972
973 /* Do not try to shrink the size if the RM size is not constant. */
974 if (TYPE_CONTAINS_TEMPLATE_P (type)
975 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
976 return type;
977
978 /* Round the RM size up to a unit boundary to get the minimal size
979 for a BLKmode record. Give up if it's already the size. */
980 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
981 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
982 if (new_size == size)
983 return type;
984
985 align = new_size & -new_size;
986 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
987 }
988
989 TYPE_USER_ALIGN (new_type) = 1;
990
991 /* Now copy the fields, keeping the position and size as we don't want
992 to change the layout by propagating the packedness downwards. */
993 for (old_field = TYPE_FIELDS (type); old_field;
994 old_field = DECL_CHAIN (old_field))
995 {
996 tree new_field_type = TREE_TYPE (old_field);
997 tree new_field, new_size;
998
999 if (RECORD_OR_UNION_TYPE_P (new_field_type)
1000 && !TYPE_FAT_POINTER_P (new_field_type)
1001 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
1002 new_field_type = make_packable_type (new_field_type, true);
1003
1004 /* However, for the last field in a not already packed record type
1005 that is of an aggregate type, we need to use the RM size in the
1006 packable version of the record type, see finish_record_type. */
1007 if (!DECL_CHAIN (old_field)
1008 && !TYPE_PACKED (type)
1009 && RECORD_OR_UNION_TYPE_P (new_field_type)
1010 && !TYPE_FAT_POINTER_P (new_field_type)
1011 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
1012 && TYPE_ADA_SIZE (new_field_type))
1013 new_size = TYPE_ADA_SIZE (new_field_type);
1014 else
1015 new_size = DECL_SIZE (old_field);
1016
1017 new_field
1018 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
1019 new_size, bit_position (old_field),
1020 TYPE_PACKED (type),
1021 !DECL_NONADDRESSABLE_P (old_field));
1022
1023 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
1024 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
1025 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
1026 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
1027
1028 DECL_CHAIN (new_field) = field_list;
1029 field_list = new_field;
1030 }
1031
1032 finish_record_type (new_type, nreverse (field_list), 2, false);
1033 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
1034 if (TYPE_STUB_DECL (type))
1035 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
1036 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
1037
1038 /* If this is a padding record, we never want to make the size smaller
1039 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
1040 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
1041 {
1042 TYPE_SIZE (new_type) = TYPE_SIZE (type);
1043 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
1044 new_size = size;
1045 }
1046 else
1047 {
1048 TYPE_SIZE (new_type) = bitsize_int (new_size);
1049 TYPE_SIZE_UNIT (new_type)
1050 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
1051 }
1052
1053 if (!TYPE_CONTAINS_TEMPLATE_P (type))
1054 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
1055
1056 compute_record_mode (new_type);
1057
1058 /* Try harder to get a packable type if necessary, for example
1059 in case the record itself contains a BLKmode field. */
1060 if (in_record && TYPE_MODE (new_type) == BLKmode)
1061 SET_TYPE_MODE (new_type,
1062 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
1063
1064 /* If neither the mode nor the size has shrunk, return the old type. */
1065 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
1066 return type;
1067
1068 return new_type;
1069 }
1070
1071 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
1072 If TYPE is the best type, return it. Otherwise, make a new type. We
1073 only support new integral and pointer types. FOR_BIASED is true if
1074 we are making a biased type. */
1075
tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Only INTEGER_TYPEs can carry a biased representation.  */
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      biased_p |= for_biased;
      /* A biased representation is stored unsigned regardless of the
	 signedness of the original type.  */
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  /* Fall back to the generic pointer mode if the target cannot
	     use a pointer of the requested size.  */
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
1153
1154 /* See if the data pointed to by the hash table slot is marked. */
1155
1156 int
1157 pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
1158 {
1159 return ggc_marked_p (t->type);
1160 }
1161
1162 /* Return true iff the padded types are equivalent. */
1163
1164 bool
1165 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1166 {
1167 tree type1, type2;
1168
1169 if (t1->hash != t2->hash)
1170 return 0;
1171
1172 type1 = t1->type;
1173 type2 = t2->type;
1174
1175 /* We consider that the padded types are equivalent if they pad the same type
1176 and have the same size, alignment, RM size and storage order. Taking the
1177 mode into account is redundant since it is determined by the others. */
1178 return
1179 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1180 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1181 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1182 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1183 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1184 }
1185
1186 /* Look up the padded TYPE in the hash table and return its canonical version
1187 if it exists; otherwise, insert it into the hash table. */
1188
1189 static tree
1190 lookup_and_insert_pad_type (tree type)
1191 {
1192 hashval_t hashcode;
1193 struct pad_type_hash in, *h;
1194
1195 hashcode
1196 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1197 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1198 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1199 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1200
1201 in.hash = hashcode;
1202 in.type = type;
1203 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1204 if (h)
1205 return h->type;
1206
1207 h = ggc_alloc<pad_type_hash> ();
1208 h->hash = hashcode;
1209 h->type = type;
1210 *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
1211 return NULL_TREE;
1212 }
1213
1214 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1215 if needed. We have already verified that SIZE and ALIGN are large enough.
1216 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1217 IS_COMPONENT_TYPE is true if this is being done for the component type of
1218 an array. IS_USER_TYPE is true if the original type needs to be completed.
1219 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1220 the RM size of the resulting type is to be set to SIZE too. */
1221
tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      /* Work on the inner type from now on.  */
      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;
  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, type);

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info).  Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information.  */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
	  && (canonical_pad_type = lookup_and_insert_pad_type (record)))
	{
	  record = canonical_pad_type;
	  /* Skip the debug machinery: the canonical type already went
	     through it when it was first created.  */
	  goto built;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
	 it will be referenced by debugging information attributes.  At global
	 level, we need a single variable across all translation units.  */
      if (size
	  && TREE_CODE (size) != INTEGER_CST
	  && (definition || global_bindings_p ()))
	{
	  /* Whether or not gnat_entity comes from source, this XVZ variable is
	     is a compilation artifact.  */
	  size_unit
	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			       size_unit, true, global_bindings_p (),
			       !definition && global_bindings_p (), false,
			       false, true, true, NULL, gnat_entity);
	  TYPE_SIZE_UNIT (record) = size_unit;
	}

      /* There is no need to show what we are a subtype of when outputting as
	 few encodings as possible: regular debugging infomation makes this
	 redundant.  */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
	{
	  /* Build the "XVS" parallel record used by the GNAT encodings to
	     describe what the padding record is a subtype of.  */
	  tree marker = make_node (RECORD_TYPE);
	  tree orig_name = TYPE_IDENTIFIER (type);

	  TYPE_NAME (marker) = concat_name (name, "XVS");
	  finish_record_type (marker,
			      create_field_decl (orig_name,
						 build_reference_type (type),
						 marker, NULL_TREE, NULL_TREE,
						 0, 0),
			      0, true);
	  TYPE_SIZE_UNIT (marker) = size_unit;

	  add_parallel_type (record, marker);
	}
    }

  rest_of_record_type_compilation (record);

built:
  /* If a simple size was explicitly given, maybe issue a warning.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity))
    return record;

  /* But don't do it if we are just annotating types and the type is tagged or
     concurrent, since these types aren't fully laid out in this mode.  */
  if (type_annotate_only)
    {
      Entity_Id gnat_type
	= is_component_type
	  ? Component_Type (gnat_entity) : Etype (gnat_entity);

      if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
	return record;
    }

  /* Take the original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified, but only for aggregate types.  */
  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
1481
1482 /* Return a copy of the padded TYPE but with reverse storage order. */
1483
tree
set_reverse_storage_order_on_pad_type (tree type)
{
  tree field, canonical_pad_type;

#ifdef ENABLE_CHECKING
  /* If the inner type is not scalar then the function does nothing.  */
  tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
  gcc_assert (!AGGREGATE_TYPE_P (inner_type) && !VECTOR_TYPE_P (inner_type));
#endif

  /* This is required for the canonicalization.  */
  gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));

  /* Copy both the type and its single field so the original padded type
     is left untouched.  */
  field = copy_node (TYPE_FIELDS (type));
  type = copy_type (type);
  DECL_CONTEXT (field) = type;
  TYPE_FIELDS (type) = field;
  TYPE_REVERSE_STORAGE_ORDER (type) = 1;
  /* Reuse an equivalent padded type if one was already canonicalized.  */
  canonical_pad_type = lookup_and_insert_pad_type (type);
  return canonical_pad_type ? canonical_pad_type : type;
}
1506 \f
1507 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1508 If this is a multi-dimensional array type, do this recursively.
1509
1510 OP may be
1511 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1512 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1513 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1514
void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For multi-dimensional arrays, relate the alias sets of the inner
     dimensions recursively first.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
      if (flag_checking || flag_strict_aliasing)
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1581 \f
1582 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1583 ARTIFICIAL_P is true if the type was generated by the compiler. */
1584
1585 void
1586 record_builtin_type (const char *name, tree type, bool artificial_p)
1587 {
1588 tree type_decl = build_decl (input_location,
1589 TYPE_DECL, get_identifier (name), type);
1590 DECL_ARTIFICIAL (type_decl) = artificial_p;
1591 TYPE_ARTIFICIAL (type) = artificial_p;
1592 gnat_pushdecl (type_decl, Empty);
1593
1594 if (debug_hooks->type_decl)
1595 debug_hooks->type_decl (type_decl, false);
1596 }
1597 \f
1598 /* Finish constructing the character type CHAR_TYPE.
1599
1600 In Ada character types are enumeration types and, as a consequence, are
1601 represented in the front-end by integral types holding the positions of
1602 the enumeration values as defined by the language, which means that the
1603 integral types are unsigned.
1604
1605 Unfortunately the signedness of 'char' in C is implementation-defined
1606 and GCC even has the option -fsigned-char to toggle it at run time.
1607 Since GNAT's philosophy is to be compatible with C by default, to wit
1608 Interfaces.C.char is defined as a mere copy of Character, we may need
1609 to declare character types as signed types in GENERIC and generate the
1610 necessary adjustments to make them behave as unsigned types.
1611
1612 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1613 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1614 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1615 types. The idea is to ensure that the bit pattern contained in the
1616 Esize'd objects is not changed, even though the numerical value will
1617 be interpreted differently depending on the signedness.
1618
1619 For character types, the bounds are implicit and, therefore, need to
1620 be adjusted. Morever, the debug info needs the unsigned version. */
1621
void
finish_character_type (tree char_type)
{
  /* Nothing to do for character types that are already unsigned.  */
  if (TYPE_UNSIGNED (char_type))
    return;

  /* Make a copy of a generic unsigned version since we'll modify it.  */
  tree unsigned_char_type
    = (char_type == char_type_node
       ? unsigned_char_type_node
       : copy_type (gnat_unsigned_type_for (char_type)));

  /* Carry over the identifying attributes of the signed version.  */
  TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
  TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
  TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);

  /* Use the unsigned version for debug info and adjust the implicit
     bounds of the signed type to the unsigned range (see the function
     commentary above for the rationale).  */
  SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
  SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
  SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
}
1642
1643 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1644 finish constructing the record type as a fat pointer type. */
1645
void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
1665
1666 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1667 finish constructing the record or union type. If REP_LEVEL is zero, this
1668 record has no representation clause and so will be entirely laid out here.
1669 If REP_LEVEL is one, this record has a representation clause and has been
1670 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1671 this record is derived from a parent record and thus inherits its layout;
1672 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1673 we need to write debug information about this type. */
1674
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember what was already set on entry: a rep clause may have
     pre-computed the size, size unit and alignment.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* The Ada size of a composite field is its Ada size, if one was
	 computed; in all other cases it is just the GCC size.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size and
		 maximum alignment, if any.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align)
		       && (!TYPE_MAX_ALIGN (record_type)
			   || TYPE_MAX_ALIGN (record_type) >= align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      /* Accumulate the sizes: MAX for unions, conditional expression for
	 discriminated (qual) unions, end-position merge for records.  */
      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order that was reversed above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  /* Reset the TYPE_MAX_ALIGN field since it's private to gigi.  */
  TYPE_MAX_ALIGN (record_type) = 0;

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1887
/* Append PARALLEL_TYPE on the chain of parallel types of TYPE.  If
   PARALLEL_TYPE has no context and its computation is not deferred yet, also
   propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
   moment TYPE will get a context.  */
1892
1893 void
1894 add_parallel_type (tree type, tree parallel_type)
1895 {
1896 tree decl = TYPE_STUB_DECL (type);
1897
1898 while (DECL_PARALLEL_TYPE (decl))
1899 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1900
1901 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1902
1903 /* If PARALLEL_TYPE already has a context, we are done. */
1904 if (TYPE_CONTEXT (parallel_type))
1905 return;
1906
1907 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1908 it to PARALLEL_TYPE. */
1909 if (TYPE_CONTEXT (type))
1910 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1911
1912 /* Otherwise TYPE has not context yet. We know it will have one thanks to
1913 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1914 so we have nothing to do in this case. */
1915 }
1916
1917 /* Return true if TYPE has a parallel type. */
1918
1919 static bool
1920 has_parallel_type (tree type)
1921 {
1922 tree decl = TYPE_STUB_DECL (type);
1923
1924 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1925 }
1926
1927 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1928 associated with it. It need not be invoked directly in most cases since
1929 finish_record_type takes care of doing so, but this can be necessary if
1930 a parallel type is to be attached to the record type. */
1931
void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  /* Scan for any field of variable size; one is enough to require the
     parallel encoding below.  */
  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      /* XVU for a variant part, XVE otherwise (see exp_dbug.ads).  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* We're going to do some pattern matching below so remove as many
	     conversions as possible.  */
	  curpos = remove_conversions (curpos, true);

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  /* CURPOS is a multiple of some alignment: round LAST_POS up to
	     that alignment and retry.  */
	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  /* CURPOS is a multiple of some alignment plus a constant addend:
	     the addend further limits the usable alignment.  */
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
		= tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      /* ADDEND & -ADDEND is the greatest power of 2 dividing
		 ADDEND.  */
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_node (field_type);
		  TYPE_ALIGN (field_type) = align;
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary: XVL for an indirect
	     (variable-sized) field, XVAn/XVLn when an alignment of n bytes
	     was applied (see exp_dbug.ads).  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Prepend; the whole list is reversed after the loop.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
2116
2117 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2118 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2119 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2120 replace a value of zero with the old size. If HAS_REP is true, we take the
2121 MAX of the end position of this field with LAST_SIZE. In all other cases,
2122 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2123
static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      /* Usual case: the new size is the end position of this field,
	 i.e. FIRST_BIT + SIZE, MAXed with the previous size when a rep
	 clause may have placed fields out of order.  */
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  /* QUAL_UNION_TYPE case: SIZE is a COND_EXPR, so distribute the merge
     over both arms, substituting the old size for a zero size (which
     denotes an empty variant).  */
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
2157
2158 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2159 related by the addition of a constant. Return that constant if so. */
2160
2161 static tree
2162 compute_related_constant (tree op0, tree op1)
2163 {
2164 tree op0_var, op1_var;
2165 tree op0_con = split_plus (op0, &op0_var);
2166 tree op1_con = split_plus (op1, &op1_var);
2167 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2168
2169 if (operand_equal_p (op0_var, op1_var, 0))
2170 return result;
2171 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2172 return result;
2173 else
2174 return 0;
2175 }
2176
2177 /* Utility function of above to split a tree OP which may be a sum, into a
2178 constant part, which is returned, and a variable part, which is stored
2179 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2180 bitsizetype. */
2181
static tree
split_plus (tree in, tree *pvar)
{
  /* Strip conversions in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  in = remove_conversions (in, false);

  /* Default: treat the whole of IN as the variable part.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* Pure constant: no variable part at all.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Split each operand recursively, then recombine the pieces with
	 the same PLUS_EXPR/MINUS_EXPR code as IN.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If both variable parts came back as the original operands, there
	 is no constant part to extract; keep *pvar as IN itself (set
	 above) instead of rebuilding an identical sum.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
2214 \f
2215 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2216 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2217 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2218 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2219 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2220 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2221 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2222 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2223 invisible reference. */
2224
2225 tree
2226 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
2227 bool return_unconstrained_p, bool return_by_direct_ref_p,
2228 bool return_by_invisi_ref_p)
2229 {
2230 /* A list of the data type nodes of the subprogram formal parameters.
2231 This list is generated by traversing the input list of PARM_DECL
2232 nodes. */
2233 vec<tree, va_gc> *param_type_list = NULL;
2234 tree t, type;
2235
2236 for (t = param_decl_list; t; t = DECL_CHAIN (t))
2237 vec_safe_push (param_type_list, TREE_TYPE (t));
2238
2239 type = build_function_type_vec (return_type, param_type_list);
2240
2241 /* TYPE may have been shared since GCC hashes types. If it has a different
2242 CICO_LIST, make a copy. Likewise for the various flags. */
2243 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2244 return_by_direct_ref_p, return_by_invisi_ref_p))
2245 {
2246 type = copy_type (type);
2247 TYPE_CI_CO_LIST (type) = cico_list;
2248 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2249 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2250 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2251 }
2252
2253 return type;
2254 }
2255 \f
2256 /* Return a copy of TYPE but safe to modify in any way. */
2257
2258 tree
2259 copy_type (tree type)
2260 {
2261 tree new_type = copy_node (type);
2262
2263 /* Unshare the language-specific data. */
2264 if (TYPE_LANG_SPECIFIC (type))
2265 {
2266 TYPE_LANG_SPECIFIC (new_type) = NULL;
2267 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2268 }
2269
2270 /* And the contents of the language-specific slot if needed. */
2271 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2272 && TYPE_RM_VALUES (type))
2273 {
2274 TYPE_RM_VALUES (new_type) = NULL_TREE;
2275 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2276 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2277 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2278 }
2279
2280 /* copy_node clears this field instead of copying it, because it is
2281 aliased with TREE_CHAIN. */
2282 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2283
2284 TYPE_POINTER_TO (new_type) = 0;
2285 TYPE_REFERENCE_TO (new_type) = 0;
2286 TYPE_MAIN_VARIANT (new_type) = new_type;
2287 TYPE_NEXT_VARIANT (new_type) = 0;
2288 TYPE_CANONICAL (new_type) = new_type;
2289
2290 return new_type;
2291 }
2292 \f
2293 /* Return a subtype of sizetype with range MIN to MAX and whose
2294 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2295 of the associated TYPE_DECL. */
2296
2297 tree
2298 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2299 {
2300 /* First build a type for the desired range. */
2301 tree type = build_nonshared_range_type (sizetype, min, max);
2302
2303 /* Then set the index type. */
2304 SET_TYPE_INDEX_TYPE (type, index);
2305 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2306
2307 return type;
2308 }
2309
2310 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2311 sizetype is used. */
2312
2313 tree
2314 create_range_type (tree type, tree min, tree max)
2315 {
2316 tree range_type;
2317
2318 if (!type)
2319 type = sizetype;
2320
2321 /* First build a type with the base range. */
2322 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2323 TYPE_MAX_VALUE (type));
2324
2325 /* Then set the actual range. */
2326 SET_TYPE_RM_MIN_VALUE (range_type, min);
2327 SET_TYPE_RM_MAX_VALUE (range_type, max);
2328
2329 return range_type;
2330 }
2331 \f
2332 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2333 NAME gives the name of the type to be used in the declaration. */
2334
2335 tree
2336 create_type_stub_decl (tree name, tree type)
2337 {
2338 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2339 DECL_ARTIFICIAL (type_decl) = 1;
2340 TYPE_ARTIFICIAL (type) = 1;
2341 return type_decl;
2342 }
2343
2344 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2345 used in the declaration. ARTIFICIAL_P is true if the declaration was
2346 generated by the compiler. DEBUG_INFO_P is true if we need to write
2347 debug information about this type. GNAT_NODE is used for the position
2348 of the decl. */
2349
tree
create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
		  Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE already carries a proper TYPE_DECL as its name.  */
  bool is_named
    = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!is_named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = name;
    }
  else
    type_decl = build_decl (input_location, TYPE_DECL, name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  TYPE_ARTIFICIAL (type) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.  This
     causes the name to be also viewed as a "tag" by the debug back-end, with
     the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
     types in DWARF.

     Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
     from multiple contexts, and "type_decl" references a copy of it: in such a
     case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
     with the mechanism above.  */
  if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
    TYPE_STUB_DECL (type) = type_decl;

  /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
     back-end doesn't support, and for others if we don't need to.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;

  return type_decl;
}
2397 \f
2398 /* Return a VAR_DECL or CONST_DECL node.
2399
2400 NAME gives the name of the variable. ASM_NAME is its assembler name
2401 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2402 the GCC tree for an optional initial expression; NULL_TREE if none.
2403
2404 CONST_FLAG is true if this variable is constant, in which case we might
2405 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2406
2407 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2408 definition to be made visible outside of the current compilation unit, for
2409 instance variable definitions in a package specification.
2410
2411 EXTERN_FLAG is true when processing an external variable declaration (as
2412 opposed to a definition: no storage is to be allocated for the variable).
2413
2414 STATIC_FLAG is only relevant when not at top level and indicates whether
2415 to always allocate storage to the variable.
2416
2417 VOLATILE_FLAG is true if this variable is declared as volatile.
2418
2419 ARTIFICIAL_P is true if the variable was generated by the compiler.
2420
2421 DEBUG_INFO_P is true if we need to write debug information for it.
2422
2423 ATTR_LIST is the list of attributes to be attached to the variable.
2424
2425 GNAT_NODE is used for the position of the decl. */
2426
tree
create_var_decl (tree name, tree asm_name, tree type, tree init,
		 bool const_flag, bool public_flag, bool extern_flag,
		 bool static_flag, bool volatile_flag, bool artificial_p,
		 bool debug_info_p, struct attrib *attr_list,
		 Node_Id gnat_node, bool const_decl_allowed_p)
{
  /* Whether the object has static storage duration, either explicitly or by
     virtue of being declared at the global level.  */
  const bool static_storage = static_flag || global_bindings_p ();

  /* Whether the initializer is constant: for an external object or an object
     with static storage duration, we check that the initializer is a valid
     constant expression for initializing a static variable; otherwise, we
     only check that it is constant.  */
  const bool init_const
    = (init
       && gnat_types_compatible_p (type, TREE_TYPE (init))
       && (extern_flag || static_storage
	   ? initializer_constant_valid_p (init, TREE_TYPE (init))
	     != NULL_TREE
	   : TREE_CONSTANT (init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a DECL is made is in turn used
     as an initializer.  We used to rely on CONST_DECL vs VAR_DECL for this,
     but extra constraints apply to this choice (see below) and they are not
     relevant to the distinction we wish to make.  */
  const bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  name, type);

  /* Detect constants created by the front-end to hold 'reference to function
     calls for stabilization purposes.  This is needed for renaming.  */
  if (const_flag && init && POINTER_TYPE_P (type))
    {
      tree inner = init;
      if (TREE_CODE (inner) == COMPOUND_EXPR)
	inner = TREE_OPERAND (inner, 1);
      inner = remove_conversions (inner, true);
      /* The address of either a non-atomic-load call or a variable that
	 itself holds a return value marks this DECL as a return value.  */
      if (TREE_CODE (inner) == ADDR_EXPR
	  && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
	       && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
	      || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
		  && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
	DECL_RETURN_VALUE_P (var_decl) = 1;
    }

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && init && !TREE_CONSTANT (init)))
    init = NULL_TREE;

  /* At the global level, a non-constant initializer generates elaboration
     statements.  Check that such statements are allowed, that is to say,
     not violating a No_Elaboration_Code restriction.  */
  if (init && !init_const && global_bindings_p ())
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Attach the initializer, if any.  */
  DECL_INITIAL (var_decl) = init;

  /* Directly set some flags.  */
  DECL_ARTIFICIAL (var_decl) = artificial_p;
  DECL_EXTERNAL (var_decl) = extern_flag;

  TREE_CONSTANT (var_decl) = constant_p;
  TREE_READONLY (var_decl) = const_flag;

  /* The object is public if it is external or if it is declared public
     and has static storage duration.  */
  TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);

  /* We need to allocate static storage for an object with static storage
     duration if it isn't external.  */
  TREE_STATIC (var_decl) = !extern_flag && static_storage;

  TREE_SIDE_EFFECTS (var_decl)
    = TREE_THIS_VOLATILE (var_decl)
    = TYPE_VOLATILE (type) | volatile_flag;

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
     since we will create an associated variable.  Likewise for an external
     constant whose initializer is not absolute, because this would mean a
     global relocation in a read-only section which runs afoul of the PE-COFF
     run-time relocation mechanism.  */
  if (!debug_info_p
      || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
      || (extern_flag
	  && constant_p
	  && init
	  && initializer_constant_valid_p (init, TREE_TYPE (init))
	     != null_pointer_node))
    DECL_IGNORED_P (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
    }

  return var_decl;
}
2566 \f
2567 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2568
2569 static bool
2570 aggregate_type_contains_array_p (tree type)
2571 {
2572 switch (TREE_CODE (type))
2573 {
2574 case RECORD_TYPE:
2575 case UNION_TYPE:
2576 case QUAL_UNION_TYPE:
2577 {
2578 tree field;
2579 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2580 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2581 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2582 return true;
2583 return false;
2584 }
2585
2586 case ARRAY_TYPE:
2587 return true;
2588
2589 default:
2590 gcc_unreachable ();
2591 }
2592 }
2593
/* Return a FIELD_DECL node.  NAME is the field's name, TYPE is its type and
   RECORD_TYPE is the type of the enclosing record.  If SIZE is nonzero, it
   is the specified size of the field.  If POS is nonzero, it is the bit
   position.  PACKED is 1 if the enclosing record is packed, -1 if it has
   Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
		   int packed, int addressable)
{
  tree field_decl = build_decl (input_location, FIELD_DECL, name, type);

  /* The record is the context of the field and a read-only component type
     makes the field itself read-only.  */
  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (type)
		     && aggregate_type_contains_array_p (type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (type);
      /* A BLKmode field cannot be a bitfield, so round its size up to a
	 whole number of bytes.  */
      if (TYPE_MODE (type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* With neither a packed record nor an explicit position, cap the
	 field's alignment at the record's alignment when the latter is
	 smaller, otherwise use the type's alignment.  */
      if (!packed && !pos)
	{
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
      {
	DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      /* Split POS into the byte offset and the bit offset within the byte,
	 as expected by the FIELD_DECL layout accessors.  */
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
2735 \f
2736 /* Return a PARM_DECL node. NAME is the name of the parameter and TYPE is
2737 its type. READONLY is true if the parameter is readonly (either an In
2738 parameter or an address of a pass-by-ref parameter). */
2739
2740 tree
2741 create_param_decl (tree name, tree type, bool readonly)
2742 {
2743 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2744
2745 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2746 can lead to various ABI violations. */
2747 if (targetm.calls.promote_prototypes (NULL_TREE)
2748 && INTEGRAL_TYPE_P (type)
2749 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2750 {
2751 /* We have to be careful about biased types here. Make a subtype
2752 of integer_type_node with the proper biasing. */
2753 if (TREE_CODE (type) == INTEGER_TYPE
2754 && TYPE_BIASED_REPRESENTATION_P (type))
2755 {
2756 tree subtype
2757 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2758 TREE_TYPE (subtype) = integer_type_node;
2759 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2760 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2761 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2762 type = subtype;
2763 }
2764 else
2765 type = integer_type_node;
2766 }
2767
2768 DECL_ARG_TYPE (param_decl) = type;
2769 TREE_READONLY (param_decl) = readonly;
2770 return param_decl;
2771 }
2772 \f
/* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
   a TYPE.  If IN_PLACE is true, the tree pointed to by NODE should not be
   changed.  GNAT_NODE is used for the position of error messages.  The list
   is consumed: *ATTR_LIST is cleared on return.  */

void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Point input_location at the Ada node so diagnostics issued by
	   the common attribute machinery carry the right position.  */
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias can only be emitted for a definition, not for an
	   external declaration.  */
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm_common.have_named_sections)
	  {
	    set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
	    /* An object placed in a specific section cannot go in a
	       common block.  */
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	set_decl_tls_model (*node, decl_default_tls_model (*node));
	/* A thread-local object cannot go in a common block either.  */
	DECL_COMMON (*node) = 0;
	break;
      }

  /* The whole list has been applied; empty it for the caller.  */
  *attr_list = NULL;
}
2837
2838 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2839 a power of 2. */
2840
2841 bool
2842 value_factor_p (tree value, HOST_WIDE_INT factor)
2843 {
2844 if (tree_fits_uhwi_p (value))
2845 return tree_to_uhwi (value) % factor == 0;
2846
2847 if (TREE_CODE (value) == MULT_EXPR)
2848 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2849 || value_factor_p (TREE_OPERAND (value, 1), factor));
2850
2851 return false;
2852 }
2853
/* Return whether GNAT_NODE is a defining identifier for a renaming that comes
   from the parameter association for the instantiation of a generic.  We do
   not want to emit source location for them: the code generated for their
   initialization is likely to disturb debugging.  */

bool
renaming_from_generic_instantiation_p (Node_Id gnat_node)
{
  /* Only compiler-generated object renamings are of interest here.  */
  if (Nkind (gnat_node) != N_Defining_Identifier
      || !IN (Ekind (gnat_node), Object_Kind)
      || Comes_From_Source (gnat_node)
      || !Present (Renamed_Object (gnat_node)))
    return false;

  /* Get the object declaration of the renamed object, if any and if the
     renamed object is a mere identifier.  */
  gnat_node = Renamed_Object (gnat_node);
  if (Nkind (gnat_node) != N_Identifier)
    return false;

  gnat_node = Entity (gnat_node);
  if (!Present (Parent (gnat_node)))
    return false;

  /* The renaming comes from a generic instantiation when the declaration of
     the renamed entity has a corresponding generic association.  */
  gnat_node = Parent (gnat_node);
  return
    (Present (gnat_node)
     && Nkind (gnat_node) == N_Object_Declaration
     && Present (Corresponding_Generic_Association (gnat_node)));
}
2884
2885 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2886 feed it with the elaboration of GNAT_SCOPE. */
2887
2888 static struct deferred_decl_context_node *
2889 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2890 {
2891 struct deferred_decl_context_node *new_node;
2892
2893 new_node
2894 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2895 new_node->decl = decl;
2896 new_node->gnat_scope = gnat_scope;
2897 new_node->force_global = force_global;
2898 new_node->types.create (1);
2899 new_node->next = deferred_decl_context_queue;
2900 deferred_decl_context_queue = new_node;
2901 return new_node;
2902 }
2903
/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */

static void
add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
{
  /* The vector is drained by process_deferred_decl_context once the decl's
     context is finally known.  */
  n->types.safe_push (type);
}
2913
2914 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2915 NULL_TREE if it is not available. */
2916
2917 static tree
2918 compute_deferred_decl_context (Entity_Id gnat_scope)
2919 {
2920 tree context;
2921
2922 if (present_gnu_tree (gnat_scope))
2923 context = get_gnu_tree (gnat_scope);
2924 else
2925 return NULL_TREE;
2926
2927 if (TREE_CODE (context) == TYPE_DECL)
2928 {
2929 const tree context_type = TREE_TYPE (context);
2930
2931 /* Skip dummy types: only the final ones can appear in the context
2932 chain. */
2933 if (TYPE_DUMMY_P (context_type))
2934 return NULL_TREE;
2935
2936 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2937 chain. */
2938 else
2939 context = context_type;
2940 }
2941
2942 return context;
2943 }
2944
/* Try to process all deferred nodes in the queue.  Keep in the queue the ones
   that cannot be processed yet, remove the other ones.  If FORCE is true,
   force the processing for all nodes, use the global context when nodes don't
   have a GNU translation.  */

void
process_deferred_decl_context (bool force)
{
  /* IT is a handle on the link to the current node, so that removal can be
     done in place while walking the singly-linked queue.  */
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it != NULL)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
	 get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
	{
	  context = compute_deferred_decl_context (gnat_scope);
	  if (!force || context)
	    break;
	  gnat_scope = get_debug_scope (gnat_scope, NULL);
	}

      /* Imported declarations must not be in a local context (i.e. not inside
	 a function).  */
      if (context && node->force_global > 0)
	{
	  tree ctx = context;

	  /* Walk up the context chain and assert no function encloses it.  */
	  while (ctx)
	    {
	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
	      ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
	    }
	}

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
	 was no elaborated scope, use the global context.  */
      if (force && !context)
	context = get_global_context ();

      if (context)
	{
	  tree t;
	  int i;

	  DECL_CONTEXT (node->decl) = context;

	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
	     ..._TYPE nodes.  */
	  FOR_EACH_VEC_ELT (node->types, i, t)
	    {
	      gnat_set_type_context (t, context);
	    }
	  processed = true;
	}

      /* If this node has been successfuly processed, remove it from the
	 queue.  Then move to the next node.  */
      if (processed)
	{
	  *it = node->next;
	  node->types.release ();
	  free (node);
	}
      else
	it = &node->next;
    }
}
3021
3022
3023 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3024
3025 static unsigned int
3026 scale_by_factor_of (tree expr, unsigned int value)
3027 {
3028 unsigned HOST_WIDE_INT addend = 0;
3029 unsigned HOST_WIDE_INT factor = 1;
3030
3031 /* Peel conversions around EXPR and try to extract bodies from function
3032 calls: it is possible to get the scale factor from size functions. */
3033 expr = remove_conversions (expr, true);
3034 if (TREE_CODE (expr) == CALL_EXPR)
3035 expr = maybe_inline_call_in_expr (expr);
3036
3037 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3038 multiple of the scale factor we are looking for. */
3039 if (TREE_CODE (expr) == PLUS_EXPR
3040 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3041 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3042 {
3043 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3044 expr = TREE_OPERAND (expr, 0);
3045 }
3046
3047 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3048 corresponding to the number of trailing zeros of the mask. */
3049 if (TREE_CODE (expr) == BIT_AND_EXPR
3050 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3051 {
3052 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3053 unsigned int i = 0;
3054
3055 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3056 {
3057 mask >>= 1;
3058 factor *= 2;
3059 i++;
3060 }
3061 }
3062
3063 /* If the addend is not a multiple of the factor we found, give up. In
3064 theory we could find a smaller common factor but it's useless for our
3065 needs. This situation arises when dealing with a field F1 with no
3066 alignment requirement but that is following a field F2 with such
3067 requirements. As long as we have F2's offset, we don't need alignment
3068 information to compute F1's. */
3069 if (addend % factor != 0)
3070 factor = 1;
3071
3072 return factor * value;
3073 }
3074
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exist between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
   is the distance in bits between the end of PREV_FIELD and the starting
   position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && tree_fits_uhwi_p (offset))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
      && tree_fits_uhwi_p (bit_position (prev_field)))
    return ((tree_to_uhwi (bit_position (prev_field))
	     + tree_to_uhwi (DECL_SIZE (prev_field)))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap */
  return true;
}
3120
3121 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3122 the decl. */
3123
3124 tree
3125 create_label_decl (tree name, Node_Id gnat_node)
3126 {
3127 tree label_decl
3128 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3129
3130 DECL_MODE (label_decl) = VOIDmode;
3131
3132 /* Add this decl to the current binding level. */
3133 gnat_pushdecl (label_decl, gnat_node);
3134
3135 return label_decl;
3136 }
3137 \f
/* Return a FUNCTION_DECL node.  NAME is the name of the subprogram, ASM_NAME
   its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
   the list of its parameters (a list of PARM_DECL nodes chained through the
   DECL_CHAIN field).

   INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.

   CONST_FLAG, PUBLIC_FLAG, EXTERN_FLAG, VOLATILE_FLAG are used to set the
   appropriate flags on the FUNCTION_DECL.

   ARTIFICIAL_P is true if the subprogram was generated by the compiler.

   DEBUG_INFO_P is true if we need to write debug information for it.

   ATTR_LIST is the list of attributes to be attached to the subprogram.

   GNAT_NODE is used for the position of the decl.  */

tree
create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
		     enum inline_status_t inline_status, bool const_flag,
		     bool public_flag, bool extern_flag, bool volatile_flag,
		     bool artificial_p, bool debug_info_p,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
  /* The RESULT_DECL holds the function's return value; its type is the
     return type of the FUNCTION_TYPE.  */
  tree result_decl
    = build_decl (input_location, RESULT_DECL, NULL_TREE, TREE_TYPE (type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_p;
  DECL_EXTERNAL (subprog_decl) = extern_flag;

  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_required:
      /* With back-end inlining, mandatory inlining is enforced through
	 the always_inline attribute.  */
      if (Back_End_Inlining)
	decl_attributes (&subprog_decl,
			 tree_cons (get_identifier ("always_inline"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);

      /* ... fall through ... */

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      /* Do not warn about failure to inline compiler-generated bodies.  */
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
      break;

    default:
      gcc_unreachable ();
    }

  if (!debug_info_p)
    DECL_IGNORED_P (subprog_decl) = 1;

  TREE_READONLY (subprog_decl) = TYPE_READONLY (type) | const_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  TREE_SIDE_EFFECTS (subprog_decl)
    = TREE_THIS_VOLATILE (subprog_decl)
    = TYPE_VOLATILE (type) | volatile_flag;

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  /* Return by invisible reference if the return type is addressable.  */
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
  DECL_RESULT (subprog_decl) = result_decl;

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  if (asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (subprog_decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (subprog_decl) = main_identifier_node;
    }

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
3240 \f
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  /* The failure of this assertion will likely come from a wrong context for
     the subprogram body, e.g. another procedure for a procedure declared at
     library level.  */
  gcc_assert (current_function_decl == decl_function_context (subprog_decl));

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  /* Create the RTL for the function's declaration.  */
  make_decl_rtl (subprog_decl);
}
3272
/* Finish translating the current subprogram and set its BODY.  Must be
   paired with a preceding call to begin_subprog_body: it pops the binding
   level pushed there and restores current_function_decl.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function, if any, as the current one.  */
  current_function_decl = decl_function_context (fndecl);
}
3299
/* Wrap up compilation of SUBPROG_DECL, a subprogram body.  */

void
rest_of_subprog_body_compilation (tree subprog_decl)
{
  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, subprog_decl);

  /* A function at top level is handed to the cgraph for compilation.  */
  if (!decl_function_context (subprog_decl))
    cgraph_node::finalize_function (subprog_decl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node::get_create (subprog_decl);
}
3322
/* Record DECL, a builtin function declaration, in the current binding
   level and return it.  Empty is passed since a builtin has no associated
   Ada source node.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
3329
3330 /* Return an integer type with the number of bits of precision given by
3331 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3332 it is a signed type. */
3333
3334 tree
3335 gnat_type_for_size (unsigned precision, int unsignedp)
3336 {
3337 tree t;
3338 char type_name[20];
3339
3340 if (precision <= 2 * MAX_BITS_PER_WORD
3341 && signed_and_unsigned_types[precision][unsignedp])
3342 return signed_and_unsigned_types[precision][unsignedp];
3343
3344 if (unsignedp)
3345 t = make_unsigned_type (precision);
3346 else
3347 t = make_signed_type (precision);
3348
3349 if (precision <= 2 * MAX_BITS_PER_WORD)
3350 signed_and_unsigned_types[precision][unsignedp] = t;
3351
3352 if (!TYPE_NAME (t))
3353 {
3354 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3355 TYPE_NAME (t) = get_identifier (type_name);
3356 }
3357
3358 return t;
3359 }
3360
3361 /* Likewise for floating-point types. */
3362
3363 static tree
3364 float_type_for_precision (int precision, machine_mode mode)
3365 {
3366 tree t;
3367 char type_name[20];
3368
3369 if (float_types[(int) mode])
3370 return float_types[(int) mode];
3371
3372 float_types[(int) mode] = t = make_node (REAL_TYPE);
3373 TYPE_PRECISION (t) = precision;
3374 layout_type (t);
3375
3376 gcc_assert (TYPE_MODE (t) == mode);
3377 if (!TYPE_NAME (t))
3378 {
3379 sprintf (type_name, "FLOAT_%d", precision);
3380 TYPE_NAME (t) = get_identifier (type_name);
3381 }
3382
3383 return t;
3384 }
3385
3386 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3387 an unsigned type; otherwise a signed type is returned. */
3388
3389 tree
3390 gnat_type_for_mode (machine_mode mode, int unsignedp)
3391 {
3392 if (mode == BLKmode)
3393 return NULL_TREE;
3394
3395 if (mode == VOIDmode)
3396 return void_type_node;
3397
3398 if (COMPLEX_MODE_P (mode))
3399 return NULL_TREE;
3400
3401 if (SCALAR_FLOAT_MODE_P (mode))
3402 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3403
3404 if (SCALAR_INT_MODE_P (mode))
3405 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3406
3407 if (VECTOR_MODE_P (mode))
3408 {
3409 machine_mode inner_mode = GET_MODE_INNER (mode);
3410 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3411 if (inner_type)
3412 return build_vector_type_for_mode (inner_type, mode);
3413 }
3414
3415 return NULL_TREE;
3416 }
3417
3418 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3419 signedness being specified by UNSIGNEDP. */
3420
3421 tree
3422 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3423 {
3424 if (type_node == char_type_node)
3425 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3426
3427 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3428
3429 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3430 {
3431 type = copy_node (type);
3432 TREE_TYPE (type) = type_node;
3433 }
3434 else if (TREE_TYPE (type_node)
3435 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3436 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3437 {
3438 type = copy_node (type);
3439 TREE_TYPE (type) = TREE_TYPE (type_node);
3440 }
3441
3442 return type;
3443 }
3444
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s), the same component type and the same scalar storage order.
     The domain test accepts either pointer equality of the TYPE_DOMAINs or
     matching constant bounds; the component test recurses into nested
     arrays.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
      && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
    return 1;

  return 0;
}
3487
3488 /* Return true if EXPR is a useless type conversion. */
3489
3490 bool
3491 gnat_useless_type_conversion (tree expr)
3492 {
3493 if (CONVERT_EXPR_P (expr)
3494 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3495 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3496 return gnat_types_compatible_p (TREE_TYPE (expr),
3497 TREE_TYPE (TREE_OPERAND (expr, 0)));
3498
3499 return false;
3500 }
3501
3502 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3503
3504 bool
3505 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3506 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3507 {
3508 return TYPE_CI_CO_LIST (t) == cico_list
3509 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3510 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3511 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3512 }
3513 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   Note that the extremum is propagated recursively with the polarity
   flipped through negations and the right operand of subtractions, so
   that the overall result is indeed the extremum of EXP.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Nothing to substitute in a DECL or a constant.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* First try to inline the call; if that succeeds, recurse on
	     the inlined expression.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument replaced by
	     its extremum.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  /* Operand 1 is the referenced field (presumably the discriminant);
	     replace the reference by the extremum of the field's type.  */
	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
	  return max_size (convert (get_base_type (val_type), val), true);
	}

      return exp;

    case tcc_comparison:
      /* A comparison yields a size of at most 1, at least 0.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* A negation flips which extremum we are looking for.  */
      return fold_build1 (code, type,
			  max_size (TREE_OPERAND (exp, 0),
				    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
	/* For a subtraction, the extremum of the result is obtained with
	   the opposite extremum of the right operand.  */
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && TREE_CODE (rhs) != INTEGER_CST)
	  return lhs;

	/* If we are going to subtract a "negative" value in an unsigned type,
	   do the operation as an addition of the negated value, in order to
	   avoid creating a spurious overflow below.  */
	if (code == MINUS_EXPR
	    && TYPE_UNSIGNED (type)
	    && TREE_CODE (rhs) == INTEGER_CST
	    && !TREE_OVERFLOW (rhs)
	    && tree_int_cst_sign_bit (rhs) != 0)
	  {
	    rhs = fold_build1 (NEGATE_EXPR, type, rhs);
	    code = PLUS_EXPR;
	  }

	/* We need to detect overflows so we call size_binop here.  */
	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  /* A SAVE_EXPR must be returned unchanged to preserve sharing.  */
	  if (code == SAVE_EXPR)
	    return exp;

	  return fold_build1 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p));

	case 2:
	  /* Only the value operand of a COMPOUND_EXPR matters here.  */
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  return fold_build2 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p),
			      max_size (TREE_OPERAND (exp, 1), max_p));

	case 3:
	  /* The extremum of a conditional is the extremum of its arms,
	     since we cannot tell which arm will be taken.  */
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
3649 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to get at the actual
     array type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     Each template bound occupies two consecutive fields (min then max),
     hence the double DECL_CHAIN step; in lock-step we advance either the
     bound list or descend one array dimension per iteration.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3712 \f
3713 /* Return true if TYPE is suitable for the element type of a vector. */
3714
3715 static bool
3716 type_for_vector_element_p (tree type)
3717 {
3718 machine_mode mode;
3719
3720 if (!INTEGRAL_TYPE_P (type)
3721 && !SCALAR_FLOAT_TYPE_P (type)
3722 && !FIXED_POINT_TYPE_P (type))
3723 return false;
3724
3725 mode = TYPE_MODE (type);
3726 if (GET_MODE_CLASS (mode) != MODE_INT
3727 && !SCALAR_FLOAT_MODE_P (mode)
3728 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3729 return false;
3730
3731 return true;
3732 }
3733
3734 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3735 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3736 attribute declaration and want to issue error messages on failure. */
3737
3738 static tree
3739 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3740 {
3741 unsigned HOST_WIDE_INT size_int, inner_size_int;
3742 int nunits;
3743
3744 /* Silently punt on variable sizes. We can't make vector types for them,
3745 need to ignore them on front-end generated subtypes of unconstrained
3746 base types, and this attribute is for binding implementors, not end
3747 users, so we should never get there from legitimate explicit uses. */
3748 if (!tree_fits_uhwi_p (size))
3749 return NULL_TREE;
3750 size_int = tree_to_uhwi (size);
3751
3752 if (!type_for_vector_element_p (inner_type))
3753 {
3754 if (attribute)
3755 error ("invalid element type for attribute %qs",
3756 IDENTIFIER_POINTER (attribute));
3757 return NULL_TREE;
3758 }
3759 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3760
3761 if (size_int % inner_size_int)
3762 {
3763 if (attribute)
3764 error ("vector size not an integral multiple of component size");
3765 return NULL_TREE;
3766 }
3767
3768 if (size_int == 0)
3769 {
3770 if (attribute)
3771 error ("zero vector size");
3772 return NULL_TREE;
3773 }
3774
3775 nunits = size_int / inner_size_int;
3776 if (nunits & (nunits - 1))
3777 {
3778 if (attribute)
3779 error ("number of components of vector not a power of two");
3780 return NULL_TREE;
3781 }
3782
3783 return build_vector_type (inner_type, nunits);
3784 }
3785
3786 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3787 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3788 processing the attribute and want to issue error messages on failure. */
3789
3790 static tree
3791 build_vector_type_for_array (tree array_type, tree attribute)
3792 {
3793 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3794 TYPE_SIZE_UNIT (array_type),
3795 attribute);
3796 if (!vector_type)
3797 return NULL_TREE;
3798
3799 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3800 return vector_type;
3801 }
3802 \f
/* Build a type to be used to represent an aliased object whose nominal type
   is an unconstrained array.  This consists of a RECORD_TYPE containing a
   field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
   If ARRAY_TYPE is that of an unconstrained array, this is used to represent
   an arbitrary unconstrained object.  Use NAME as the name of the record.
   DEBUG_INFO_P is true if we need to write debug information for the type.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name,
		       bool debug_info_p)
{
  tree decl;
  tree type = make_node (RECORD_TYPE);
  /* The bounds (template) come first...  */
  tree template_field
    = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);
  /* ...followed by the array data itself.  */
  tree array_field
    = create_field_decl (get_identifier ("ARRAY"), object_type, type,
			 NULL_TREE, NULL_TREE, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  DECL_CHAIN (template_field) = array_field;
  finish_record_type (type, template_field, 0, true);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  decl = create_type_decl (name, type, true, debug_info_p, Empty);

  /* template_type will not be used elsewhere than here, so to keep the debug
     info clean and in order to avoid scoping issues, make decl its
     context.  */
  gnat_set_type_context (template_type, decl);

  return type;
}
3839
3840 /* Same, taking a thin or fat pointer type instead of a template type. */
3841
3842 tree
3843 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3844 tree name, bool debug_info_p)
3845 {
3846 tree template_type;
3847
3848 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3849
3850 template_type
3851 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3852 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3853 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3854
3855 return
3856 build_unc_object_type (template_type, object_type, name, debug_info_p);
3857 }
3858 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  Also fix up the null bounds expression attached
	 to thin pointers, whose operand designates the old type.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* Recurse on the array pointer field...  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* ...then on the template pointer field...  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* ...and finally on the object record type (thin pointer case).  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
3988 \f
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is a fat pointer: its first field points to the array, its second
     field points to the template (bounds).  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* The null bounds object is created once per pointer-to-template
	 type and cached in TYPE_NULL_BOUNDS.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* Protect EXPR since it is evaluated more than once below.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, field, false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
4092 \f
4093 /* Create an expression whose value is that of EXPR,
4094 converted to type TYPE. The TREE_TYPE of the value
4095 is always TYPE. This function implements all reasonable
4096 conversions; callers should filter out those that are
4097 not permitted by the language being compiled. */
4098
4099 tree
4100 convert (tree type, tree expr)
4101 {
4102 tree etype = TREE_TYPE (expr);
4103 enum tree_code ecode = TREE_CODE (etype);
4104 enum tree_code code = TREE_CODE (type);
4105
4106 /* If the expression is already of the right type, we are done. */
4107 if (etype == type)
4108 return expr;
4109
4110 /* If both input and output have padding and are of variable size, do this
4111 as an unchecked conversion. Likewise if one is a mere variant of the
4112 other, so we avoid a pointless unpad/repad sequence. */
4113 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4114 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4115 && (!TREE_CONSTANT (TYPE_SIZE (type))
4116 || !TREE_CONSTANT (TYPE_SIZE (etype))
4117 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4118 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4119 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4120 ;
4121
4122 /* If the output type has padding, convert to the inner type and make a
4123 constructor to build the record, unless a variable size is involved. */
4124 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4125 {
4126 vec<constructor_elt, va_gc> *v;
4127
4128 /* If we previously converted from another type and our type is
4129 of variable size, remove the conversion to avoid the need for
4130 variable-sized temporaries. Likewise for a conversion between
4131 original and packable version. */
4132 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4133 && (!TREE_CONSTANT (TYPE_SIZE (type))
4134 || (ecode == RECORD_TYPE
4135 && TYPE_NAME (etype)
4136 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4137 expr = TREE_OPERAND (expr, 0);
4138
4139 /* If we are just removing the padding from expr, convert the original
4140 object if we have variable size in order to avoid the need for some
4141 variable-sized temporaries. Likewise if the padding is a variant
4142 of the other, so we avoid a pointless unpad/repad sequence. */
4143 if (TREE_CODE (expr) == COMPONENT_REF
4144 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4145 && (!TREE_CONSTANT (TYPE_SIZE (type))
4146 || TYPE_MAIN_VARIANT (type)
4147 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4148 || (ecode == RECORD_TYPE
4149 && TYPE_NAME (etype)
4150 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4151 return convert (type, TREE_OPERAND (expr, 0));
4152
4153 /* If the inner type is of self-referential size and the expression type
4154 is a record, do this as an unchecked conversion. But first pad the
4155 expression if possible to have the same size on both sides. */
4156 if (ecode == RECORD_TYPE
4157 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4158 {
4159 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4160 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4161 false, false, false, true),
4162 expr);
4163 return unchecked_convert (type, expr, false);
4164 }
4165
4166 /* If we are converting between array types with variable size, do the
4167 final conversion as an unchecked conversion, again to avoid the need
4168 for some variable-sized temporaries. If valid, this conversion is
4169 very likely purely technical and without real effects. */
4170 if (ecode == ARRAY_TYPE
4171 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4172 && !TREE_CONSTANT (TYPE_SIZE (etype))
4173 && !TREE_CONSTANT (TYPE_SIZE (type)))
4174 return unchecked_convert (type,
4175 convert (TREE_TYPE (TYPE_FIELDS (type)),
4176 expr),
4177 false);
4178
4179 vec_alloc (v, 1);
4180 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4181 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4182 return gnat_build_constructor (type, v);
4183 }
4184
4185 /* If the input type has padding, remove it and convert to the output type.
4186 The conditions ordering is arranged to ensure that the output type is not
4187 a padding type here, as it is not clear whether the conversion would
4188 always be correct if this was to happen. */
4189 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4190 {
4191 tree unpadded;
4192
4193 /* If we have just converted to this padded type, just get the
4194 inner expression. */
4195 if (TREE_CODE (expr) == CONSTRUCTOR)
4196 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4197
4198 /* Otherwise, build an explicit component reference. */
4199 else
4200 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4201
4202 return convert (type, unpadded);
4203 }
4204
4205 /* If the input is a biased type, adjust first. */
4206 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4207 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4208 fold_convert (TREE_TYPE (etype), expr),
4209 fold_convert (TREE_TYPE (etype),
4210 TYPE_MIN_VALUE (etype))));
4211
4212 /* If the input is a justified modular type, we need to extract the actual
4213 object before converting it to any other type with the exceptions of an
4214 unconstrained array or of a mere type variant. It is useful to avoid the
4215 extraction and conversion in the type variant case because it could end
4216 up replacing a VAR_DECL expr by a constructor and we might be about the
4217 take the address of the result. */
4218 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4219 && code != UNCONSTRAINED_ARRAY_TYPE
4220 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4221 return
4222 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4223
4224 /* If converting to a type that contains a template, convert to the data
4225 type and then build the template. */
4226 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4227 {
4228 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4229 vec<constructor_elt, va_gc> *v;
4230 vec_alloc (v, 2);
4231
4232 /* If the source already has a template, get a reference to the
4233 associated array only, as we are going to rebuild a template
4234 for the target type anyway. */
4235 expr = maybe_unconstrained_array (expr);
4236
4237 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4238 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4239 obj_type, NULL_TREE));
4240 if (expr)
4241 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4242 convert (obj_type, expr));
4243 return gnat_build_constructor (type, v);
4244 }
4245
4246 /* There are some cases of expressions that we process specially. */
4247 switch (TREE_CODE (expr))
4248 {
4249 case ERROR_MARK:
4250 return expr;
4251
4252 case NULL_EXPR:
4253 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4254 conversion in gnat_expand_expr. NULL_EXPR does not represent
4255 and actual value, so no conversion is needed. */
4256 expr = copy_node (expr);
4257 TREE_TYPE (expr) = type;
4258 return expr;
4259
4260 case STRING_CST:
4261 /* If we are converting a STRING_CST to another constrained array type,
4262 just make a new one in the proper type. */
4263 if (code == ecode && AGGREGATE_TYPE_P (etype)
4264 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4265 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4266 {
4267 expr = copy_node (expr);
4268 TREE_TYPE (expr) = type;
4269 return expr;
4270 }
4271 break;
4272
4273 case VECTOR_CST:
4274 /* If we are converting a VECTOR_CST to a mere type variant, just make
4275 a new one in the proper type. */
4276 if (code == ecode && gnat_types_compatible_p (type, etype))
4277 {
4278 expr = copy_node (expr);
4279 TREE_TYPE (expr) = type;
4280 return expr;
4281 }
4282
4283 case CONSTRUCTOR:
4284 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4285 another padding type around the same type, just make a new one in
4286 the proper type. */
4287 if (code == ecode
4288 && (gnat_types_compatible_p (type, etype)
4289 || (code == RECORD_TYPE
4290 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4291 && TREE_TYPE (TYPE_FIELDS (type))
4292 == TREE_TYPE (TYPE_FIELDS (etype)))))
4293 {
4294 expr = copy_node (expr);
4295 TREE_TYPE (expr) = type;
4296 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4297 return expr;
4298 }
4299
4300 /* Likewise for a conversion between original and packable version, or
4301 conversion between types of the same size and with the same list of
4302 fields, but we have to work harder to preserve type consistency. */
4303 if (code == ecode
4304 && code == RECORD_TYPE
4305 && (TYPE_NAME (type) == TYPE_NAME (etype)
4306 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4307
4308 {
4309 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4310 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4311 vec<constructor_elt, va_gc> *v;
4312 vec_alloc (v, len);
4313 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4314 unsigned HOST_WIDE_INT idx;
4315 tree index, value;
4316
4317 /* Whether we need to clear TREE_CONSTANT et al. on the output
4318 constructor when we convert in place. */
4319 bool clear_constant = false;
4320
4321 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4322 {
4323 /* Skip the missing fields in the CONSTRUCTOR. */
4324 while (efield && field && !SAME_FIELD_P (efield, index))
4325 {
4326 efield = DECL_CHAIN (efield);
4327 field = DECL_CHAIN (field);
4328 }
4329 /* The field must be the same. */
4330 if (!(efield && field && SAME_FIELD_P (efield, field)))
4331 break;
4332 constructor_elt elt
4333 = {field, convert (TREE_TYPE (field), value)};
4334 v->quick_push (elt);
4335
4336 /* If packing has made this field a bitfield and the input
4337 value couldn't be emitted statically any more, we need to
4338 clear TREE_CONSTANT on our output. */
4339 if (!clear_constant
4340 && TREE_CONSTANT (expr)
4341 && !CONSTRUCTOR_BITFIELD_P (efield)
4342 && CONSTRUCTOR_BITFIELD_P (field)
4343 && !initializer_constant_valid_for_bitfield_p (value))
4344 clear_constant = true;
4345
4346 efield = DECL_CHAIN (efield);
4347 field = DECL_CHAIN (field);
4348 }
4349
4350 /* If we have been able to match and convert all the input fields
4351 to their output type, convert in place now. We'll fallback to a
4352 view conversion downstream otherwise. */
4353 if (idx == len)
4354 {
4355 expr = copy_node (expr);
4356 TREE_TYPE (expr) = type;
4357 CONSTRUCTOR_ELTS (expr) = v;
4358 if (clear_constant)
4359 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4360 return expr;
4361 }
4362 }
4363
4364 /* Likewise for a conversion between array type and vector type with a
4365 compatible representative array. */
4366 else if (code == VECTOR_TYPE
4367 && ecode == ARRAY_TYPE
4368 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4369 etype))
4370 {
4371 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4372 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4373 vec<constructor_elt, va_gc> *v;
4374 unsigned HOST_WIDE_INT ix;
4375 tree value;
4376
4377 /* Build a VECTOR_CST from a *constant* array constructor. */
4378 if (TREE_CONSTANT (expr))
4379 {
4380 bool constant_p = true;
4381
4382 /* Iterate through elements and check if all constructor
4383 elements are *_CSTs. */
4384 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4385 if (!CONSTANT_CLASS_P (value))
4386 {
4387 constant_p = false;
4388 break;
4389 }
4390
4391 if (constant_p)
4392 return build_vector_from_ctor (type,
4393 CONSTRUCTOR_ELTS (expr));
4394 }
4395
4396 /* Otherwise, build a regular vector constructor. */
4397 vec_alloc (v, len);
4398 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4399 {
4400 constructor_elt elt = {NULL_TREE, value};
4401 v->quick_push (elt);
4402 }
4403 expr = copy_node (expr);
4404 TREE_TYPE (expr) = type;
4405 CONSTRUCTOR_ELTS (expr) = v;
4406 return expr;
4407 }
4408 break;
4409
4410 case UNCONSTRAINED_ARRAY_REF:
4411 /* First retrieve the underlying array. */
4412 expr = maybe_unconstrained_array (expr);
4413 etype = TREE_TYPE (expr);
4414 ecode = TREE_CODE (etype);
4415 break;
4416
4417 case VIEW_CONVERT_EXPR:
4418 {
4419 /* GCC 4.x is very sensitive to type consistency overall, and view
4420 conversions thus are very frequent. Even though just "convert"ing
4421 the inner operand to the output type is fine in most cases, it
4422 might expose unexpected input/output type mismatches in special
4423 circumstances so we avoid such recursive calls when we can. */
4424 tree op0 = TREE_OPERAND (expr, 0);
4425
4426 /* If we are converting back to the original type, we can just
4427 lift the input conversion. This is a common occurrence with
4428 switches back-and-forth amongst type variants. */
4429 if (type == TREE_TYPE (op0))
4430 return op0;
4431
4432 /* Otherwise, if we're converting between two aggregate or vector
4433 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4434 target type in place or to just convert the inner expression. */
4435 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4436 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4437 {
4438 /* If we are converting between mere variants, we can just
4439 substitute the VIEW_CONVERT_EXPR in place. */
4440 if (gnat_types_compatible_p (type, etype))
4441 return build1 (VIEW_CONVERT_EXPR, type, op0);
4442
4443 /* Otherwise, we may just bypass the input view conversion unless
4444 one of the types is a fat pointer, which is handled by
4445 specialized code below which relies on exact type matching. */
4446 else if (!TYPE_IS_FAT_POINTER_P (type)
4447 && !TYPE_IS_FAT_POINTER_P (etype))
4448 return convert (type, op0);
4449 }
4450
4451 break;
4452 }
4453
4454 default:
4455 break;
4456 }
4457
4458 /* Check for converting to a pointer to an unconstrained array. */
4459 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4460 return convert_to_fat_pointer (type, expr);
4461
4462 /* If we are converting between two aggregate or vector types that are mere
4463 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4464 to a vector type from its representative array type. */
4465 else if ((code == ecode
4466 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4467 && gnat_types_compatible_p (type, etype))
4468 || (code == VECTOR_TYPE
4469 && ecode == ARRAY_TYPE
4470 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4471 etype)))
4472 return build1 (VIEW_CONVERT_EXPR, type, expr);
4473
4474 /* If we are converting between tagged types, try to upcast properly. */
4475 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4476 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4477 {
4478 tree child_etype = etype;
4479 do {
4480 tree field = TYPE_FIELDS (child_etype);
4481 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4482 return build_component_ref (expr, field, false);
4483 child_etype = TREE_TYPE (field);
4484 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4485 }
4486
4487 /* If we are converting from a smaller form of record type back to it, just
4488 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4489 size on both sides. */
4490 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4491 && smaller_form_type_p (etype, type))
4492 {
4493 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4494 false, false, false, true),
4495 expr);
4496 return build1 (VIEW_CONVERT_EXPR, type, expr);
4497 }
4498
4499 /* In all other cases of related types, make a NOP_EXPR. */
4500 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4501 return fold_convert (type, expr);
4502
4503 switch (code)
4504 {
4505 case VOID_TYPE:
4506 return fold_build1 (CONVERT_EXPR, type, expr);
4507
4508 case INTEGER_TYPE:
4509 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4510 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4511 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4512 return unchecked_convert (type, expr, false);
4513 else if (TYPE_BIASED_REPRESENTATION_P (type))
4514 return fold_convert (type,
4515 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4516 convert (TREE_TYPE (type), expr),
4517 convert (TREE_TYPE (type),
4518 TYPE_MIN_VALUE (type))));
4519
4520 /* ... fall through ... */
4521
4522 case ENUMERAL_TYPE:
4523 case BOOLEAN_TYPE:
4524 /* If we are converting an additive expression to an integer type
4525 with lower precision, be wary of the optimization that can be
4526 applied by convert_to_integer. There are 2 problematic cases:
4527 - if the first operand was originally of a biased type,
4528 because we could be recursively called to convert it
4529 to an intermediate type and thus rematerialize the
4530 additive operator endlessly,
4531 - if the expression contains a placeholder, because an
4532 intermediate conversion that changes the sign could
4533 be inserted and thus introduce an artificial overflow
4534 at compile time when the placeholder is substituted. */
4535 if (code == INTEGER_TYPE
4536 && ecode == INTEGER_TYPE
4537 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4538 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4539 {
4540 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4541
4542 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4543 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4544 || CONTAINS_PLACEHOLDER_P (expr))
4545 return build1 (NOP_EXPR, type, expr);
4546 }
4547
4548 return fold (convert_to_integer (type, expr));
4549
4550 case POINTER_TYPE:
4551 case REFERENCE_TYPE:
4552 /* If converting between two thin pointers, adjust if needed to account
4553 for differing offsets from the base pointer, depending on whether
4554 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4555 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4556 {
4557 tree etype_pos
4558 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4559 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4560 : size_zero_node;
4561 tree type_pos
4562 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4563 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4564 : size_zero_node;
4565 tree byte_diff = size_diffop (type_pos, etype_pos);
4566
4567 expr = build1 (NOP_EXPR, type, expr);
4568 if (integer_zerop (byte_diff))
4569 return expr;
4570
4571 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4572 fold_convert (sizetype, byte_diff));
4573 }
4574
4575 /* If converting fat pointer to normal or thin pointer, get the pointer
4576 to the array and then convert it. */
4577 if (TYPE_IS_FAT_POINTER_P (etype))
4578 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4579
4580 return fold (convert_to_pointer (type, expr));
4581
4582 case REAL_TYPE:
4583 return fold (convert_to_real (type, expr));
4584
4585 case RECORD_TYPE:
4586 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4587 {
4588 vec<constructor_elt, va_gc> *v;
4589 vec_alloc (v, 1);
4590
4591 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4592 convert (TREE_TYPE (TYPE_FIELDS (type)),
4593 expr));
4594 return gnat_build_constructor (type, v);
4595 }
4596
4597 /* ... fall through ... */
4598
4599 case ARRAY_TYPE:
4600 /* In these cases, assume the front-end has validated the conversion.
4601 If the conversion is valid, it will be a bit-wise conversion, so
4602 it can be viewed as an unchecked conversion. */
4603 return unchecked_convert (type, expr, false);
4604
4605 case UNION_TYPE:
4606 /* This is a either a conversion between a tagged type and some
4607 subtype, which we have to mark as a UNION_TYPE because of
4608 overlapping fields or a conversion of an Unchecked_Union. */
4609 return unchecked_convert (type, expr, false);
4610
4611 case UNCONSTRAINED_ARRAY_TYPE:
4612 /* If the input is a VECTOR_TYPE, convert to the representative
4613 array type first. */
4614 if (ecode == VECTOR_TYPE)
4615 {
4616 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4617 etype = TREE_TYPE (expr);
4618 ecode = TREE_CODE (etype);
4619 }
4620
4621 /* If EXPR is a constrained array, take its address, convert it to a
4622 fat pointer, and then dereference it. Likewise if EXPR is a
4623 record containing both a template and a constrained array.
4624 Note that a record representing a justified modular type
4625 always represents a packed constrained array. */
4626 if (ecode == ARRAY_TYPE
4627 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4628 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4629 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4630 return
4631 build_unary_op
4632 (INDIRECT_REF, NULL_TREE,
4633 convert_to_fat_pointer (TREE_TYPE (type),
4634 build_unary_op (ADDR_EXPR,
4635 NULL_TREE, expr)));
4636
4637 /* Do something very similar for converting one unconstrained
4638 array to another. */
4639 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4640 return
4641 build_unary_op (INDIRECT_REF, NULL_TREE,
4642 convert (TREE_TYPE (type),
4643 build_unary_op (ADDR_EXPR,
4644 NULL_TREE, expr)));
4645 else
4646 gcc_unreachable ();
4647
4648 case COMPLEX_TYPE:
4649 return fold (convert_to_complex (type, expr));
4650
4651 default:
4652 gcc_unreachable ();
4653 }
4654 }
4655
4656 /* Create an expression whose value is that of EXPR converted to the common
4657 index type, which is sizetype. EXPR is supposed to be in the base type
4658 of the GNAT index type. Calling it is equivalent to doing
4659
4660 convert (sizetype, expr)
4661
4662 but we try to distribute the type conversion with the knowledge that EXPR
4663 cannot overflow in its type. This is a best-effort approach and we fall
4664 back to the above expression as soon as difficulties are encountered.
4665
4666 This is necessary to overcome issues that arise when the GNAT base index
4667 type and the GCC common index type (sizetype) don't have the same size,
4668 which is quite frequent on 64-bit architectures. In this case, and if
4669 the GNAT base index type is signed but the iteration type of the loop has
4670 been forced to unsigned, the loop scalar evolution engine cannot compute
4671 a simple evolution for the general induction variables associated with the
4672 array indices, because it will preserve the wrap-around semantics in the
4673 unsigned type of their "inner" part. As a result, many loop optimizations
4674 are blocked.
4675
4676 The solution is to use a special (basic) induction variable that is at
4677 least as large as sizetype, and to express the aforementioned general
4678 induction variables in terms of this induction variable, eliminating
4679 the problematic intermediate truncation to the GNAT base index type.
4680 This is possible as long as the original expression doesn't overflow
4681 and if the middle-end hasn't introduced artificial overflows in the
4682 course of the various simplification it can make to the expression. */
4683
4684 tree
4685 convert_to_index_type (tree expr)
4686 {
4687 enum tree_code code = TREE_CODE (expr);
4688 tree type = TREE_TYPE (expr);
4689
4690 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4691 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4692 if (TYPE_UNSIGNED (type) || !optimize)
4693 return convert (sizetype, expr);
4694
4695 switch (code)
4696 {
4697 case VAR_DECL:
4698 /* The main effect of the function: replace a loop parameter with its
4699 associated special induction variable. */
4700 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4701 expr = DECL_INDUCTION_VAR (expr);
4702 break;
4703
4704 CASE_CONVERT:
4705 {
4706 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4707 /* Bail out as soon as we suspect some sort of type frobbing. */
4708 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4709 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4710 break;
4711 }
4712
4713 /* ... fall through ... */
4714
4715 case NON_LVALUE_EXPR:
4716 return fold_build1 (code, sizetype,
4717 convert_to_index_type (TREE_OPERAND (expr, 0)));
4718
4719 case PLUS_EXPR:
4720 case MINUS_EXPR:
4721 case MULT_EXPR:
4722 return fold_build2 (code, sizetype,
4723 convert_to_index_type (TREE_OPERAND (expr, 0)),
4724 convert_to_index_type (TREE_OPERAND (expr, 1)));
4725
4726 case COMPOUND_EXPR:
4727 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4728 convert_to_index_type (TREE_OPERAND (expr, 1)));
4729
4730 case COND_EXPR:
4731 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4732 convert_to_index_type (TREE_OPERAND (expr, 1)),
4733 convert_to_index_type (TREE_OPERAND (expr, 2)));
4734
4735 default:
4736 break;
4737 }
4738
4739 return convert (sizetype, expr);
4740 }
4741 \f
4742 /* Remove all conversions that are done in EXP. This includes converting
4743 from a padded type or to a justified modular type. If TRUE_ADDRESS
4744 is true, always return the address of the containing object even if
4745 the address is not bit-aligned. */
4746
4747 tree
4748 remove_conversions (tree exp, bool true_address)
4749 {
4750 switch (TREE_CODE (exp))
4751 {
4752 case CONSTRUCTOR:
4753 if (true_address
4754 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4755 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4756 return
4757 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4758 break;
4759
4760 case COMPONENT_REF:
4761 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4762 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4763 break;
4764
4765 CASE_CONVERT:
4766 case VIEW_CONVERT_EXPR:
4767 case NON_LVALUE_EXPR:
4768 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4769
4770 default:
4771 break;
4772 }
4773
4774 return exp;
4775 }
4776 \f
4777 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4778 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4779 likewise return an expression pointing to the underlying array. */
4780
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Save these flags to reinstate them on the rebuilt reference.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Fetch the pointer operand of the reference.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Distribute the dereference into both arms of the conditional
		 so that each arm dereferences its own pointer.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Dereference the first field of the pointer record and
		 reinstate the flags saved above on the result.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* For a NULL_EXPR, rebuild it with the type derived through the
	 fields of the fat pointer type — presumably the underlying array
	 type (see ada-tree.h for the layout; confirm there).  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  code = TREE_CODE (exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* If the array initializer is a box, return NULL_TREE.  */
	  if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
	    return NULL_TREE;

	  /* The array is the field that follows the template.  */
	  exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  /* Either the underlying array or the original expression unchanged.  */
  return exp;
}
4866 \f
4867 /* Return true if EXPR is an expression that can be folded as an operand
4868 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4869
4870 static bool
4871 can_fold_for_view_convert_p (tree expr)
4872 {
4873 tree t1, t2;
4874
4875 /* The folder will fold NOP_EXPRs between integral types with the same
4876 precision (in the middle-end's sense). We cannot allow it if the
4877 types don't have the same precision in the Ada sense as well. */
4878 if (TREE_CODE (expr) != NOP_EXPR)
4879 return true;
4880
4881 t1 = TREE_TYPE (expr);
4882 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4883
4884 /* Defer to the folder for non-integral conversions. */
4885 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4886 return true;
4887
4888 /* Only fold conversions that preserve both precisions. */
4889 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4890 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4891 return true;
4892
4893 return false;
4894 }
4895
4896 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4897 If NOTRUNC_P is true, truncation operations should be suppressed.
4898
4899 Special care is required with (source or target) integral types whose
4900 precision is not equal to their size, to make sure we fetch or assign
4901 the value bits whose location might depend on the endianness, e.g.
4902
4903 Rmsize : constant := 8;
4904 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4905
4906 type Bit_Array is array (1 .. Rmsize) of Boolean;
4907 pragma Pack (Bit_Array);
4908
4909 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4910
4911 Value : Int := 2#1000_0001#;
4912 Vbits : Bit_Array := To_Bit_Array (Value);
4913
4914 we expect the 8 bits at Vbits'Address to always contain Value, while
4915 their original location depends on the endianness, at Value'Address
4916 on a little-endian architecture but not on a big-endian one. */
4917
tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  /* Scratch variables: TEM for an intermediate vector type, C for the
     result of the size comparison in the padding case below.  */
  tree tem;
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if (((INTEGRAL_TYPE_P (type)
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && (INTEGRAL_TYPE_P (etype)
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      /* Strip the bias from the input through an intermediate copy of the
	 type without biased representation, so that the conversion below
	 operates on the actual value.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Likewise convert to an unbiased copy of the target type first,
	 then reinterpret the result in the biased type.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the result from the field.

     There is a subtlety if the source type is an aggregate type with reverse
     storage order because its representation is not contiguous in the native
     storage order, i.e. a direct unchecked conversion to an integral type
     with N bits of precision cannot read the first N bits of the aggregate
     type.  To overcome it, we do an unchecked conversion to an integral type
     with reverse storage order and return the resulting value.  This also
     ensures that the result of the unchecked conversion doesn't depend on
     the endianness of the target machine, but only on the storage order of
     the aggregate type.

     Finally, for the sake of consistency, we do the unchecked conversion
     to an integral type with reverse storage order as soon as the source
     type is an aggregate type with reverse storage order, even if there
     are no considerations of precision or size involved.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (type),
				     TYPE_SIZE (type)) < 0
	       || (AGGREGATE_TYPE_P (etype)
		   && TYPE_REVERSE_STORAGE_ORDER (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      /* Propagate the storage order of the source aggregate to the
	 intermediate record, per the rationale above.  */
      if (AGGREGATE_TYPE_P (etype))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (etype);

      /* The single field has exactly the RM precision of the target.  */
      if (TYPE_UNSIGNED (type))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      /* Convert to the record, extract the field, then reinterpret the
	 field value in the target type.  */
      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.

     The same considerations as above apply if the target type is an aggregate
     type with reverse storage order and we also proceed similarly.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
				     TYPE_SIZE (etype)) < 0
	       || (AGGREGATE_TYPE_P (type)
		   && TYPE_REVERSE_STORAGE_ORDER (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      if (AGGREGATE_TYPE_P (type))
	TYPE_REVERSE_STORAGE_ORDER (rec_type)
	  = TYPE_REVERSE_STORAGE_ORDER (type);

      /* The single field has exactly the RM precision of the source.  */
      if (TYPE_UNSIGNED (etype))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      /* Wrap the value in a one-field constructor of the intermediate
	 record, then convert that record to the target type.  */
      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      /* Source smaller than target: pad the source up to the target size
	 and convert the padded object.  */
      if (c < 0)
	{
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      /* Source bigger than target: convert to a padded version of the
	 target sized like the source, then extract the field.  */
      else
	{
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
	   && code == RECORD_TYPE
	   && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
				      Empty, false, false, false, true),
		      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      /* Only fold when folding cannot merge conversions that differ in
	 their Ada precision — see can_fold_for_view_convert_p.  */
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type)
      && TYPE_RM_SIZE (type)
      && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && tree_int_cst_compare (TYPE_RM_SIZE (type),
				    TYPE_RM_SIZE (etype)
				    ? TYPE_RM_SIZE (etype)
				    : TYPE_SIZE (etype)) == 0)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Extend by shifting left then (arithmetically or logically,
	 according to the signedness of BASE_TYPE) right by the number of
	 bits beyond the RM precision.  */
      tree base_type
	= gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
			      TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       TYPE_SIZE (type), TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
5184 \f
5185 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5186 the latter being a record type as predicated by Is_Record_Type. */
5187
5188 enum tree_code
5189 tree_code_for_record_type (Entity_Id gnat_type)
5190 {
5191 Node_Id component_list, component;
5192
5193 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5194 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5195 if (!Is_Unchecked_Union (gnat_type))
5196 return RECORD_TYPE;
5197
5198 gnat_type = Implementation_Base_Type (gnat_type);
5199 component_list
5200 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5201
5202 for (component = First_Non_Pragma (Component_Items (component_list));
5203 Present (component);
5204 component = Next_Non_Pragma (component))
5205 if (Ekind (Defining_Entity (component)) == E_Component)
5206 return RECORD_TYPE;
5207
5208 return UNION_TYPE;
5209 }
5210
5211 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5212 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5213 according to the presence of an alignment clause on the type or, if it
5214 is an array, on the component type. */
5215
5216 bool
5217 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5218 {
5219 gnat_type = Underlying_Type (gnat_type);
5220
5221 *align_clause = Present (Alignment_Clause (gnat_type));
5222
5223 if (Is_Array_Type (gnat_type))
5224 {
5225 gnat_type = Underlying_Type (Component_Type (gnat_type));
5226 if (Present (Alignment_Clause (gnat_type)))
5227 *align_clause = true;
5228 }
5229
5230 if (!Is_Floating_Point_Type (gnat_type))
5231 return false;
5232
5233 if (UI_To_Int (Esize (gnat_type)) != 64)
5234 return false;
5235
5236 return true;
5237 }
5238
5239 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5240 size is greater or equal to 64 bits, or an array of such a type. Set
5241 ALIGN_CLAUSE according to the presence of an alignment clause on the
5242 type or, if it is an array, on the component type. */
5243
5244 bool
5245 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5246 {
5247 gnat_type = Underlying_Type (gnat_type);
5248
5249 *align_clause = Present (Alignment_Clause (gnat_type));
5250
5251 if (Is_Array_Type (gnat_type))
5252 {
5253 gnat_type = Underlying_Type (Component_Type (gnat_type));
5254 if (Present (Alignment_Clause (gnat_type)))
5255 *align_clause = true;
5256 }
5257
5258 if (!Is_Scalar_Type (gnat_type))
5259 return false;
5260
5261 if (UI_To_Int (Esize (gnat_type)) < 64)
5262 return false;
5263
5264 return true;
5265 }
5266
5267 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5268 component of an aggregate type. */
5269
5270 bool
5271 type_for_nonaliased_component_p (tree gnu_type)
5272 {
5273 /* If the type is passed by reference, we may have pointers to the
5274 component so it cannot be made non-aliased. */
5275 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5276 return false;
5277
5278 /* We used to say that any component of aggregate type is aliased
5279 because the front-end may take 'Reference of it. The front-end
5280 has been enhanced in the meantime so as to use a renaming instead
5281 in most cases, but the back-end can probably take the address of
5282 such a component too so we go for the conservative stance.
5283
5284 For instance, we might need the address of any array type, even
5285 if normally passed by copy, to construct a fat pointer if the
5286 component is used as an actual for an unconstrained formal.
5287
5288 Likewise for record types: even if a specific record subtype is
5289 passed by copy, the parent type might be passed by ref (e.g. if
5290 it's of variable size) and we might take the address of a child
5291 component to pass to a parent formal. We have no way to check
5292 for such conditions here. */
5293 if (AGGREGATE_TYPE_P (gnu_type))
5294 return false;
5295
5296 return true;
5297 }
5298
5299 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5300
5301 bool
5302 smaller_form_type_p (tree type, tree orig_type)
5303 {
5304 tree size, osize;
5305
5306 /* We're not interested in variants here. */
5307 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5308 return false;
5309
5310 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5311 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5312 return false;
5313
5314 size = TYPE_SIZE (type);
5315 osize = TYPE_SIZE (orig_type);
5316
5317 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5318 return false;
5319
5320 return tree_int_cst_lt (size, osize) != 0;
5321 }
5322
5323 /* Perform final processing on global declarations. */
5324
/* Dummy VAR_DECL used as the anchor recording which types were declared as
   used at the global level; kept alive across collections via GTY.  */
static GTY (()) tree dummy_global;

void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose, but
     we need to build it unconditionally to avoid -fcompare-debug issues.  */
  if (first_global_object_name)
    {
      struct varpool_node *node;
      char *label;

      /* Name the dummy after the first global object of the unit, mangled
	 into a private (non-user-visible) label.  */
      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      /* NOTE(review): DECL_HARD_REGISTER on this fake static presumably
	 keeps it from being allocated as a real object while still letting
	 the varpool see it -- confirm against varpool assembling rules.  */
      DECL_HARD_REGISTER (dummy_global) = 1;
      TREE_STATIC (dummy_global) = 1;
      node = varpool_node::get_create (dummy_global);
      node->definition = 1;
      node->force_output = 1;

      /* Drain the pending list, attaching every recorded type to the
	 dummy so the types-used-by machinery keeps them around.  */
      if (types_used_by_cur_var_decl)
	while (!types_used_by_cur_var_decl->is_empty ())
	  {
	    tree t = types_used_by_cur_var_decl->pop ();
	    types_used_by_var_decl_insert (t, dummy_global);
	  }
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->type_decl (iter, false);

  /* Then output the global variables.  We need to do that after the debug
     information for global types is emitted so that they are finalized.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == VAR_DECL)
      rest_of_decl_compilation (iter, true, 0);

  /* Output the imported modules/declarations.  In GNAT, these are only
     materializing subprogram.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
					    DECL_CONTEXT (iter), 0);
}
5380
5381 /* ************************************************************************
5382 * * GCC builtins support *
5383 * ************************************************************************ */
5384
5385 /* The general scheme is fairly simple:
5386
5387 For each builtin function/type to be declared, gnat_install_builtins calls
5388 internal facilities which eventually get to gnat_pushdecl, which in turn
5389 tracks the so declared builtin function decls in the 'builtin_decls' global
5390 datastructure. When an Intrinsic subprogram declaration is processed, we
5391 search this global datastructure to retrieve the associated BUILT_IN DECL
5392 node. */
5393
5394 /* Search the chain of currently available builtin declarations for a node
5395 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5396 found, if any, or NULL_TREE otherwise. */
5397 tree
5398 builtin_decl_for (tree name)
5399 {
5400 unsigned i;
5401 tree decl;
5402
5403 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5404 if (DECL_NAME (decl) == name)
5405 return decl;
5406
5407 return NULL_TREE;
5408 }
5409
5410 /* The code below eventually exposes gnat_install_builtins, which declares
5411 the builtin types and functions we might need, either internally or as
5412 user accessible facilities.
5413
5414 ??? This is a first implementation shot, still in rough shape. It is
5415 heavily inspired from the "C" family implementation, with chunks copied
5416 verbatim from there.
5417
5418 Two obvious improvement candidates are:
5419 o Use a more efficient name/decl mapping scheme
5420 o Devise a middle-end infrastructure to avoid having to copy
5421 pieces between front-ends. */
5422
5423 /* ----------------------------------------------------------------------- *
5424 * BUILTIN ELEMENTARY TYPES *
5425 * ----------------------------------------------------------------------- */
5426
5427 /* Standard data types to be used in builtin argument declarations. */
5428
/* Indices into the C_GLOBAL_TREES array below, one per elementary type
   built by install_builtin_elementary_types.  */
enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

static tree c_global_trees[CTI_MAX];

/* Convenience accessors mirroring the names used by the C family.  */
#define signed_size_type_node	c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node	c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node	c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turns inhibits support for a
   number of builtin functions.  Map the missing types to void so the
   affected signatures degenerate instead of crashing.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
5450
5451 /* Build the void_list_node (void_type_node having been created). */
5452
5453 static tree
5454 build_void_list_node (void)
5455 {
5456 tree t = build_tree_list (NULL_TREE, void_type_node);
5457 return t;
5458 }
5459
5460 /* Used to help initialize the builtin-types.def table. When a type of
5461 the correct size doesn't exist, use error_mark_node instead of NULL.
5462 The later results in segfaults even when a decl using the type doesn't
5463 get invoked. */
5464
5465 static tree
5466 builtin_type_for_size (int size, bool unsignedp)
5467 {
5468 tree type = gnat_type_for_size (size, unsignedp);
5469 return type ? type : error_mark_node;
5470 }
5471
5472 /* Build/push the elementary type decls that builtin functions/types
5473 will need. */
5474
5475 static void
5476 install_builtin_elementary_types (void)
5477 {
5478 signed_size_type_node = gnat_signed_type_for (size_type_node);
5479 pid_type_node = integer_type_node;
5480 void_list_node = build_void_list_node ();
5481
5482 string_type_node = build_pointer_type (char_type_node);
5483 const_string_type_node
5484 = build_pointer_type (build_qualified_type
5485 (char_type_node, TYPE_QUAL_CONST));
5486 }
5487
5488 /* ----------------------------------------------------------------------- *
5489 * BUILTIN FUNCTION TYPES *
5490 * ----------------------------------------------------------------------- */
5491
5492 /* Now, builtin function types per se. */
5493
/* One enumerator per type listed in builtin-types.def: each DEF_* macro
   expands to just its NAME so that including the .def file enumerates
   every builtin type, terminated by BT_LAST.  */
enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) NAME,
#define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9) NAME,
#define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
#define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
				NAME,
#define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6) NAME,
#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5557
5558 /* A helper function for install_builtin_types. Build function type
5559 for DEF with return type RET and N arguments. If VAR is true, then the
5560 function should be variadic after those N arguments.
5561
5562 Takes special care not to ICE if any of the types involved are
5563 error_mark_node, which indicates that said type is not in fact available
5564 (see builtin_type_for_size). In which case the function type as a whole
5565 should be error_mark_node. */
5566
5567 static void
5568 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5569 {
5570 tree t;
5571 tree *args = XALLOCAVEC (tree, n);
5572 va_list list;
5573 int i;
5574
5575 va_start (list, n);
5576 for (i = 0; i < n; ++i)
5577 {
5578 builtin_type a = (builtin_type) va_arg (list, int);
5579 t = builtin_types[a];
5580 if (t == error_mark_node)
5581 goto egress;
5582 args[i] = t;
5583 }
5584
5585 t = builtin_types[ret];
5586 if (t == error_mark_node)
5587 goto egress;
5588 if (var)
5589 t = build_varargs_function_type_array (t, n, args);
5590 else
5591 t = build_function_type_array (t, n, args);
5592
5593 egress:
5594 builtin_types[def] = t;
5595 va_end (list);
5596 }
5597
5598 /* Build the builtin function types and install them in the builtin_types
5599 array for later use in builtin function decls. */
5600
static void
install_builtin_function_types (void)
{
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  /* An array-typed va_list decays to a pointer when passed as an argument,
     so both the argument and the reference forms are that pointer type;
     otherwise pass by value and reference by C++-style reference.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
	build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

  /* Redefine every DEF_* macro so that including builtin-types.def now
     fills the builtin_types array via def_fn_type; the third argument of
     def_fn_type distinguishes fixed (0) from variadic (1) signatures.  */
#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6)					\
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7)					\
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8)				\
  def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
	       ARG7, ARG8);
#define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9)			\
  def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6,	\
	       ARG7, ARG8, ARG9);
#define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10)		\
  def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10);
#define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11)	\
  def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10, ARG11);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6)					\
  def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7)				\
  def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}
5701
5702 /* ----------------------------------------------------------------------- *
5703 * BUILTIN ATTRIBUTES *
5704 * ----------------------------------------------------------------------- */
5705
/* One enumerator per attribute component in builtin-attrs.def: each
   DEF_ATTR_* macro expands to just its ENUM name here; the trees
   themselves are built by install_builtin_attributes below.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Attribute trees indexed by built_in_attribute, filled in by
   install_builtin_attributes.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5723
/* Build the attribute trees for all built_in_attribute enumerators by
   redefining the DEF_ATTR_* macros so that a second inclusion of
   builtin-attrs.def performs the corresponding tree constructions.  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5748
5749 /* Handle a "const" attribute; arguments as in
5750 struct attribute_spec.handler. */
5751
5752 static tree
5753 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5754 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5755 bool *no_add_attrs)
5756 {
5757 if (TREE_CODE (*node) == FUNCTION_DECL)
5758 TREE_READONLY (*node) = 1;
5759 else
5760 *no_add_attrs = true;
5761
5762 return NULL_TREE;
5763 }
5764
5765 /* Handle a "nothrow" attribute; arguments as in
5766 struct attribute_spec.handler. */
5767
5768 static tree
5769 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5770 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5771 bool *no_add_attrs)
5772 {
5773 if (TREE_CODE (*node) == FUNCTION_DECL)
5774 TREE_NOTHROW (*node) = 1;
5775 else
5776 *no_add_attrs = true;
5777
5778 return NULL_TREE;
5779 }
5780
5781 /* Handle a "pure" attribute; arguments as in
5782 struct attribute_spec.handler. */
5783
5784 static tree
5785 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5786 int ARG_UNUSED (flags), bool *no_add_attrs)
5787 {
5788 if (TREE_CODE (*node) == FUNCTION_DECL)
5789 DECL_PURE_P (*node) = 1;
5790 /* TODO: support types. */
5791 else
5792 {
5793 warning (OPT_Wattributes, "%qs attribute ignored",
5794 IDENTIFIER_POINTER (name));
5795 *no_add_attrs = true;
5796 }
5797
5798 return NULL_TREE;
5799 }
5800
5801 /* Handle a "no vops" attribute; arguments as in
5802 struct attribute_spec.handler. */
5803
static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* Unlike the other handlers here, this one asserts instead of
     warning, so it is presumably only ever attached by internal code
     to function decls -- confirm against the callers.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
5813
5814 /* Helper for nonnull attribute handling; fetch the operand number
5815 from the attribute argument list. */
5816
5817 static bool
5818 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5819 {
5820 /* Verify the arg number is a constant. */
5821 if (!tree_fits_uhwi_p (arg_num_expr))
5822 return false;
5823
5824 *valp = TREE_INT_CST_LOW (arg_num_expr);
5825 return true;
5826 }
5827
5828 /* Handle the "nonnull" attribute. */
5829 static tree
5830 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5831 tree args, int ARG_UNUSED (flags),
5832 bool *no_add_attrs)
5833 {
5834 tree type = *node;
5835 unsigned HOST_WIDE_INT attr_arg_num;
5836
5837 /* If no arguments are specified, all pointer arguments should be
5838 non-null. Verify a full prototype is given so that the arguments
5839 will have the correct types when we actually check them later. */
5840 if (!args)
5841 {
5842 if (!prototype_p (type))
5843 {
5844 error ("nonnull attribute without arguments on a non-prototype");
5845 *no_add_attrs = true;
5846 }
5847 return NULL_TREE;
5848 }
5849
5850 /* Argument list specified. Verify that each argument number references
5851 a pointer argument. */
5852 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5853 {
5854 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5855
5856 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5857 {
5858 error ("nonnull argument has invalid operand number (argument %lu)",
5859 (unsigned long) attr_arg_num);
5860 *no_add_attrs = true;
5861 return NULL_TREE;
5862 }
5863
5864 if (prototype_p (type))
5865 {
5866 function_args_iterator iter;
5867 tree argument;
5868
5869 function_args_iter_init (&iter, type);
5870 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5871 {
5872 argument = function_args_iter_cond (&iter);
5873 if (!argument || ck_num == arg_num)
5874 break;
5875 }
5876
5877 if (!argument
5878 || TREE_CODE (argument) == VOID_TYPE)
5879 {
5880 error ("nonnull argument with out-of-range operand number "
5881 "(argument %lu, operand %lu)",
5882 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5883 *no_add_attrs = true;
5884 return NULL_TREE;
5885 }
5886
5887 if (TREE_CODE (argument) != POINTER_TYPE)
5888 {
5889 error ("nonnull argument references non-pointer operand "
5890 "(argument %lu, operand %lu)",
5891 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5892 *no_add_attrs = true;
5893 return NULL_TREE;
5894 }
5895 }
5896 }
5897
5898 return NULL_TREE;
5899 }
5900
5901 /* Handle a "sentinel" attribute. */
5902
5903 static tree
5904 handle_sentinel_attribute (tree *node, tree name, tree args,
5905 int ARG_UNUSED (flags), bool *no_add_attrs)
5906 {
5907 if (!prototype_p (*node))
5908 {
5909 warning (OPT_Wattributes,
5910 "%qs attribute requires prototypes with named arguments",
5911 IDENTIFIER_POINTER (name));
5912 *no_add_attrs = true;
5913 }
5914 else
5915 {
5916 if (!stdarg_p (*node))
5917 {
5918 warning (OPT_Wattributes,
5919 "%qs attribute only applies to variadic functions",
5920 IDENTIFIER_POINTER (name));
5921 *no_add_attrs = true;
5922 }
5923 }
5924
5925 if (args)
5926 {
5927 tree position = TREE_VALUE (args);
5928
5929 if (TREE_CODE (position) != INTEGER_CST)
5930 {
5931 warning (0, "requested position is not an integer constant");
5932 *no_add_attrs = true;
5933 }
5934 else
5935 {
5936 if (tree_int_cst_lt (position, integer_zero_node))
5937 {
5938 warning (0, "requested position is less than zero");
5939 *no_add_attrs = true;
5940 }
5941 }
5942 }
5943
5944 return NULL_TREE;
5945 }
5946
5947 /* Handle a "noreturn" attribute; arguments as in
5948 struct attribute_spec.handler. */
5949
5950 static tree
5951 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5952 int ARG_UNUSED (flags), bool *no_add_attrs)
5953 {
5954 tree type = TREE_TYPE (*node);
5955
5956 /* See FIXME comment in c_common_attribute_table. */
5957 if (TREE_CODE (*node) == FUNCTION_DECL)
5958 TREE_THIS_VOLATILE (*node) = 1;
5959 else if (TREE_CODE (type) == POINTER_TYPE
5960 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5961 TREE_TYPE (*node)
5962 = build_pointer_type
5963 (build_type_variant (TREE_TYPE (type),
5964 TYPE_READONLY (TREE_TYPE (type)), 1));
5965 else
5966 {
5967 warning (OPT_Wattributes, "%qs attribute ignored",
5968 IDENTIFIER_POINTER (name));
5969 *no_add_attrs = true;
5970 }
5971
5972 return NULL_TREE;
5973 }
5974
5975 /* Handle a "leaf" attribute; arguments as in
5976 struct attribute_spec.handler. */
5977
5978 static tree
5979 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5980 int ARG_UNUSED (flags), bool *no_add_attrs)
5981 {
5982 if (TREE_CODE (*node) != FUNCTION_DECL)
5983 {
5984 warning (OPT_Wattributes, "%qE attribute ignored", name);
5985 *no_add_attrs = true;
5986 }
5987 if (!TREE_PUBLIC (*node))
5988 {
5989 warning (OPT_Wattributes, "%qE attribute has no effect", name);
5990 *no_add_attrs = true;
5991 }
5992
5993 return NULL_TREE;
5994 }
5995
5996 /* Handle a "always_inline" attribute; arguments as in
5997 struct attribute_spec.handler. */
5998
5999 static tree
6000 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6001 int ARG_UNUSED (flags), bool *no_add_attrs)
6002 {
6003 if (TREE_CODE (*node) == FUNCTION_DECL)
6004 {
6005 /* Set the attribute and mark it for disregarding inline limits. */
6006 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6007 }
6008 else
6009 {
6010 warning (OPT_Wattributes, "%qE attribute ignored", name);
6011 *no_add_attrs = true;
6012 }
6013
6014 return NULL_TREE;
6015 }
6016
6017 /* Handle a "malloc" attribute; arguments as in
6018 struct attribute_spec.handler. */
6019
6020 static tree
6021 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6022 int ARG_UNUSED (flags), bool *no_add_attrs)
6023 {
6024 if (TREE_CODE (*node) == FUNCTION_DECL
6025 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6026 DECL_IS_MALLOC (*node) = 1;
6027 else
6028 {
6029 warning (OPT_Wattributes, "%qs attribute ignored",
6030 IDENTIFIER_POINTER (name));
6031 *no_add_attrs = true;
6032 }
6033
6034 return NULL_TREE;
6035 }
6036
6037 /* Fake handler for attributes we don't properly support. */
6038
tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  /* Accept silently.  *NO_ADD_ATTRS is left untouched, so the attribute
     is presumably still recorded on the node by the caller -- confirm
     against decl_attributes.  */
  return NULL_TREE;
}
6048
6049 /* Handle a "type_generic" attribute. */
6050
static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  /* Nothing else to do: the attribute itself is added unchanged.  */
  return NULL_TREE;
}
6064
6065 /* Handle a "vector_size" attribute; arguments as in
6066 struct attribute_spec.handler. */
6067
6068 static tree
6069 handle_vector_size_attribute (tree *node, tree name, tree args,
6070 int ARG_UNUSED (flags), bool *no_add_attrs)
6071 {
6072 tree type = *node;
6073 tree vector_type;
6074
6075 *no_add_attrs = true;
6076
6077 /* We need to provide for vector pointers, vector arrays, and
6078 functions returning vectors. For example:
6079
6080 __attribute__((vector_size(16))) short *foo;
6081
6082 In this case, the mode is SI, but the type being modified is
6083 HI, so we need to look further. */
6084 while (POINTER_TYPE_P (type)
6085 || TREE_CODE (type) == FUNCTION_TYPE
6086 || TREE_CODE (type) == ARRAY_TYPE)
6087 type = TREE_TYPE (type);
6088
6089 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6090 if (!vector_type)
6091 return NULL_TREE;
6092
6093 /* Build back pointers if needed. */
6094 *node = reconstruct_complex_type (*node, vector_type);
6095
6096 return NULL_TREE;
6097 }
6098
6099 /* Handle a "vector_type" attribute; arguments as in
6100 struct attribute_spec.handler. */
6101
6102 static tree
6103 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6104 int ARG_UNUSED (flags), bool *no_add_attrs)
6105 {
6106 tree type = *node;
6107 tree vector_type;
6108
6109 *no_add_attrs = true;
6110
6111 if (TREE_CODE (type) != ARRAY_TYPE)
6112 {
6113 error ("attribute %qs applies to array types only",
6114 IDENTIFIER_POINTER (name));
6115 return NULL_TREE;
6116 }
6117
6118 vector_type = build_vector_type_for_array (type, name);
6119 if (!vector_type)
6120 return NULL_TREE;
6121
6122 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6123 *node = vector_type;
6124
6125 return NULL_TREE;
6126 }
6127
6128 /* ----------------------------------------------------------------------- *
6129 * BUILTIN FUNCTIONS *
6130 * ----------------------------------------------------------------------- */
6131
6132 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6133 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6134 if nonansi_p and flag_no_nonansi_builtin. */
6135
static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode))
    return;

  /* BOTH_P or FALLBACK_P require a "__builtin_"-prefixed NAME, since the
     library name is derived by stripping that prefix below.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  /* Declare the "__builtin_" entry point, optionally with the stripped
     name as its library fallback.  */
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Record the "__builtin_" decl as the canonical one for FNCODE.  */
  set_builtin_decl (fncode, decl, implicit_p);
}
6169
/* ISO C dialect flags, pinned to 0: Ada has no such dialects.  They are
   presumably referenced by the COND expressions in builtins.def below --
   confirm against that file.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
6173
6174 /* Install what the common builtins.def offers. */
6175
static void
install_builtin_functions (void)
{
  /* For each entry of builtins.def whose condition COND holds, declare
     the builtin via def_builtin_1, using the function/library types and
     attribute trees built by the install_* routines above.  */
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
}
6189
6190 /* ----------------------------------------------------------------------- *
6191 * BUILTIN FUNCTIONS *
6192 * ----------------------------------------------------------------------- */
6193
6194 /* Install the builtin functions we might need. */
6195
void
gnat_install_builtins (void)
{
  /* The elementary types, function signatures and attribute trees must be
     in place before any builtin function decl can be registered, since
     install_builtin_functions reads the arrays they fill in.  */
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6214
6215 #include "gt-ada-utils.h"
6216 #include "gtype-ada.h"