/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2015, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at the global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating-point
   types whose size is equal to 64 bits, or 0 if this alignment is not
   specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",         0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",       0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",          0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",       0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",       0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",      0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",      0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",          0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "always_inline", 0, 0,  true,  false, false, handle_always_inline_attribute,
    false },
  { "malloc",        0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic",  0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",   1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",   0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",     0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3, false, true, true, fake_attribute_handler, false },
  { "format_arg", 1, 1, false, true, true, fake_attribute_handler, false },

  { NULL,         0, 0, false, false, false, NULL, false }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash {
  unsigned long hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);
  static int keep_cache_entry (pad_type_hash *&);
};

static GTY ((cache))
  hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* A linked list used as a queue to defer the initialization of the
   DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
   of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  tree decl;               /* The ..._DECL node to work on.  */
  Entity_Id gnat_scope;    /* The corresponding entity's Scope attribute.  */
  int force_global;        /* force_global value when pushing DECL.  */
  vec<tree, va_heap, vl_ptr> types;  /* A list of ..._TYPE nodes to propagate
					the context to.  */
  struct deferred_decl_context_node *next;  /* The next queue item.  */
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
				       tree type);
\f
/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}
\f
/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
		&& (PRESENT_GNU_TREE (gnat_entity)
		    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
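
/* Illustrative note (not part of the original sources): callers in Gigi
   typically pair the three routines above as sketched below, where
   GNAT_ENTITY and GNU_DECL stand for values the caller already has:

     if (!present_gnu_tree (gnat_entity))
       save_gnu_tree (gnat_entity, gnu_decl, false);
     gnu_decl = get_gnu_tree (gnat_entity);

   i.e. an entity is associated at most once, and get_gnu_tree must only be
   called once the association is known to exist, since it aborts on a
   missing one.  */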
\f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use it.  Otherwise make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
			? tree_code_for_record_type (gnat_equiv)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
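
/* Illustrative note (not part of the original sources): for an Ada type
   such as "type A is array (Integer range <>) of Character", the nodes
   built above correspond to the A__XUB bounds template, the A__XUA array
   type, the two-field { P_ARRAY, P_BOUNDS } fat pointer A__XUP and the
   A__XUT object record, all of them dummies to be completed when the
   designated type is elaborated for real.  */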
\f
/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || current_function_decl == NULL_TREE;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
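
/* Illustrative note (not part of the original sources): binding levels are
   used in strictly nested push/pop pairs, e.g. when elaborating a scope:

     gnat_pushlevel ();
     ... gnat_pushdecl lands the local decls in the new BLOCK ...
     gnat_poplevel ();

   with gnat_zaplevel being the variant used to discard a level whose BLOCK
   turns out not to be needed at all.  */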
\f
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
	 Some parallel types seem to be present in multiple parallel type
	 chains, so don't mess with their context if they already have one.  */
      if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
	{
	  if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
	    DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
	  TYPE_CONTEXT (parallel_type) = context;
	}

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for debug info purposes, or Empty if there is no such scope.  If not
   NULL, set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
	{
	case E_Function:
	case E_Procedure:
	  if (Present (Protected_Body_Subprogram (gnat_entity)))
	    gnat_entity = Protected_Body_Subprogram (gnat_entity);

	  /* If the scope is a subprogram, then just rely on
	     current_function_decl, so that we don't have to defer
	     anything.  This is needed because other places rely on the
	     validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
	  if (is_subprogram)
	    *is_subprogram = true;
	  return gnat_entity;

	case E_Record_Type:
	case E_Record_Subtype:
	  return gnat_entity;

	default:
	  /* By default, we are not interested in this particular scope: go to
	     the outer one.  */
	  break;
	}
      gnat_entity = Scope (gnat_entity);
    }
  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type, tree context,
			   struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, creating it first if needed.  */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context = build_translation_unit_decl (NULL_TREE);
      debug_hooks->register_main_translation_unit (global_context);
    }
  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to the current_function_decl
	 if the GNAT scope reached the global scope, if it reached a
	 subprogram, or if the declaration is a subprogram or a variable
	 (for them we skip intermediate context types because the subprogram
	 body elaboration machinery and the inliner both expect a subprogram
	 context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
	= get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl != NULL_TREE && force_global == 0)
	context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope
     and the current_function_decl did not help in determining the context,
     use the global scope.  */
  if (!deferred_decl_context && context == NULL_TREE)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context != NULL_TREE
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context) != NULL_TREE))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
	{
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    vec_safe_push (builtin_decls, decl);
	}
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
	 to generate a typedef in DWARF for them and make sure it is preserved,
	 unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
	  && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
	      || DECL_ARTIFICIAL (decl)))
	;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
	 generate the typedef in DWARF.  Also do that for fat pointer types
	 because, even though they are tagged types in the C sense, they are
	 still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
	       && (TREE_CODE (t) == ARRAY_TYPE
		   || TREE_CODE (t) == POINTER_TYPE
		   || TYPE_IS_FAT_POINTER_P (t)))
	{
	  tree tt;
	  /* ??? Copy and original type are not supposed to be variant but we
	     really need a variant for the placeholder machinery to work.  */
	  if (TYPE_IS_FAT_POINTER_P (t))
	    tt = build_variant_type_copy (t);
	  else
	    {
	      /* TYPE_NEXT_PTR_TO is a chain of main variants.  */
	      tt = build_distinct_type_copy (TYPE_MAIN_VARIANT (t));
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (TYPE_MAIN_VARIANT (t)) = tt;
	      tt = build_qualified_type (tt, TYPE_QUALS (t));
	    }
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt, DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (TYPE_NAME (t)
	      && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	      && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  /* Array types need to have a name so that they can be related to
	     their GNAT encodings.  */
	  if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
	    TYPE_NAME (t) = DECL_NAME (decl);
	  t = NULL_TREE;
	}
      else if (TYPE_NAME (t)
	       && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	       && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
	 qualifiers machinery to work properly (see check_qualified_type).
	 Also propagate the context to them.  Note that it will be propagated
	 to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  /* ??? Because of the previous kludge, we can have variants of fat
	     pointer types with different names.  */
	  if (!(TYPE_IS_FAT_POINTER_P (t)
		&& TYPE_NAME (t)
		&& TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t, DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}
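
/* Illustrative note (not part of the original sources): gnat_pushdecl is
   not usually called directly; the decl-building helpers of this file,
   e.g. create_type_decl, create_var_decl and create_subprog_decl, all
   funnel through it, which is why the DECL_CONTEXT and deferred-context
   bookkeeping is centralized above.  */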
\f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     |  field (type)   |
                {  +------------+-------------+-----------------+
                   |<-- room -->|<- voffset ->|<---- size ----->|
                   o            o
                   |            |
                   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));
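
  /* Worked example (not in the original sources): for an ALIGN of 64 bits,
     i.e. 8 bytes, and a vblock address of 13, the two's complement of 13
     ends in ...11110011, so "and"ing it with 8 - 1 = 7 yields 3, and
     13 + 3 = 16 is indeed the next multiple of 8.  */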

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
    }
  else
    {
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
	return type;

      align = new_size & -new_size;
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_size = DECL_SIZE (old_field);

      new_field
	= create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
			     new_size, bit_position (old_field),
			     TYPE_PACKED (type),
			     !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
	= size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}
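
/* Illustrative note (not part of the original sources): for a 21-bit size,
   the in_record path above uses align = ceil_pow2 (21) = 32 and rounds the
   size up to (21 + 31) & -32 = 32 bits, so the copy can get an integral
   (SImode) mode; the array path only rounds the RM size up to a byte
   boundary, i.e. 24 bits, in the hope of a smaller BLKmode type.  */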

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Only do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
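
/* Illustrative note (not part of the original sources): given a 64-bit
   INTEGER_TYPE and a size of 8, make_type_from_size returns a fresh 8-bit
   integer type carrying the original bounds as its RM values; the
   RECORD_TYPE and POINTER_TYPE cases convert between the fat and thin
   representations of pointers to unconstrained arrays when the requested
   size only fits one of them.  */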

/* See if the data pointed to by the hash table slot is marked.  */

int
pad_type_hasher::keep_cache_entry (pad_type_hash *&t)
{
  return ggc_marked_p (t->type);
}

/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return false;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment, RM size and storage order.
     Taking the mode into account is redundant since it is determined by
     the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);
  if (h)
    return h->type;

  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
  return NULL_TREE;
}

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
	  && (canonical_pad_type = lookup_and_insert_pad_type (record)))
	{
	  record = canonical_pad_type;
	  goto built;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
	 it will be referenced by debugging information attributes.  At global
	 level, we need a single variable across all translation units.  */
      if (size
	  && TREE_CODE (size) != INTEGER_CST
	  && (definition || global_bindings_p ()))
	{
	  size_unit
	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			       size_unit, true, global_bindings_p (),
			       !definition && global_bindings_p (), false,
			       false, true, true, NULL, gnat_entity);
	  TYPE_SIZE_UNIT (record) = size_unit;
	}

      tree marker = make_node (RECORD_TYPE);
      tree orig_name = TYPE_IDENTIFIER (type);

      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
			  create_field_decl (orig_name,
					     build_reference_type (type),
					     marker, NULL_TREE, NULL_TREE,
					     0, 0),
			  0, true);
      TYPE_SIZE_UNIT (marker) = size_unit;

      add_parallel_type (record, marker);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
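
/* Illustrative note (not part of the original sources): a 1-byte component
   forced into a 64-bit slot by a representation clause would get a "PAD"
   record of size 64 bits wrapped around it here, and the warning circuitry
   above would flag the 56 unused bits if the component comes from
   source.  */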

/* Return a copy of the padded TYPE but with reverse storage order.  */

tree
set_reverse_storage_order_on_pad_type (tree type)
{
  tree field, canonical_pad_type;

#ifdef ENABLE_CHECKING
  /* Setting reverse storage order on the padded type would do nothing if
     the inner type were not scalar, so this must not happen.  */
  tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
  gcc_assert (!AGGREGATE_TYPE_P (inner_type) && !VECTOR_TYPE_P (inner_type));
#endif

  /* This is required for the canonicalization.  */
  gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));

  field = copy_node (TYPE_FIELDS (type));
  type = copy_type (type);
  DECL_CONTEXT (field) = type;
  TYPE_FIELDS (type) = field;
  TYPE_REVERSE_STORAGE_ORDER (type) = 1;
  canonical_pad_type = lookup_and_insert_pad_type (type);
  return canonical_pad_type ? canonical_pad_type : type;
}
\f
1477 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1478 If this is a multi-dimensional array type, do this recursively.
1479
1480 OP may be
1481 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1482 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1483 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1484
1485 void
1486 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1487 {
1488 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1489 of a one-dimensional array, since the padding has the same alias set
1490 as the field type, but if it's a multi-dimensional array, we need to
1491 see the inner types. */
1492 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1493 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1494 || TYPE_PADDING_P (gnu_old_type)))
1495 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1496
1497 /* Unconstrained array types are deemed incomplete and would thus be given
1498 alias set 0. Retrieve the underlying array type. */
1499 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1500 gnu_old_type
1501 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1502 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1503 gnu_new_type
1504 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1505
1506 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1507 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1508 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1509 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1510
1511 switch (op)
1512 {
1513 case ALIAS_SET_COPY:
1514 /* The alias set shouldn't be copied between array types with different
1515 aliasing settings because this can break the aliasing relationship
1516 between the array type and its element type. */
1517 if (flag_checking || flag_strict_aliasing)
1518 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1519 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1520 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1521 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1522
1523 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1524 break;
1525
1526 case ALIAS_SET_SUBSET:
1527 case ALIAS_SET_SUPERSET:
1528 {
1529 alias_set_type old_set = get_alias_set (gnu_old_type);
1530 alias_set_type new_set = get_alias_set (gnu_new_type);
1531
1532 /* Do nothing if the alias sets conflict. This ensures that we
1533 never call record_alias_subset several times for the same pair
1534 or at all for alias set 0. */
1535 if (!alias_sets_conflict_p (old_set, new_set))
1536 {
1537 if (op == ALIAS_SET_SUBSET)
1538 record_alias_subset (old_set, new_set);
1539 else
1540 record_alias_subset (new_set, old_set);
1541 }
1542 }
1543 break;
1544
1545 default:
1546 gcc_unreachable ();
1547 }
1548
1549 record_component_aliases (gnu_new_type);
1550 }
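
/* E.g. a hypothetical sketch: for GNU_DERIVED, an array type derived from
   GNU_PARENT, one would typically write

     relate_alias_sets (gnu_derived, gnu_parent, ALIAS_SET_COPY);

   so that accesses through both types conflict, whereas ALIAS_SET_SUBSET and
   ALIAS_SET_SUPERSET only record a one-way inclusion between the two alias
   sets by means of record_alias_subset. */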
1551 \f
1552 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1553 ARTIFICIAL_P is true if the type was generated by the compiler. */
1554
1555 void
1556 record_builtin_type (const char *name, tree type, bool artificial_p)
1557 {
1558 tree type_decl = build_decl (input_location,
1559 TYPE_DECL, get_identifier (name), type);
1560 DECL_ARTIFICIAL (type_decl) = artificial_p;
1561 TYPE_ARTIFICIAL (type) = artificial_p;
1562 gnat_pushdecl (type_decl, Empty);
1563
1564 if (debug_hooks->type_decl)
1565 debug_hooks->type_decl (type_decl, false);
1566 }
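
/* E.g. a hypothetical sketch:

     record_builtin_type ("integer", gnat_type_for_size (32, 0), false);

   pushes a non-artificial TYPE_DECL named "integer" at the current binding
   level and lets the debug back-end know about it. */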
1567 \f
1568 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1569 finish constructing the record type as a fat pointer type. */
1570
1571 void
1572 finish_fat_pointer_type (tree record_type, tree field_list)
1573 {
1574 /* Make sure we can put it into a register. */
1575 if (STRICT_ALIGNMENT)
1576 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
1577
1578 /* Show what it really is. */
1579 TYPE_FAT_POINTER_P (record_type) = 1;
1580
1581 /* Do not emit debug info for it since the types of its fields may still be
1582 incomplete at this point. */
1583 finish_record_type (record_type, field_list, 0, false);
1584
1585 /* Force type_contains_placeholder_p to return true on it. Although the
1586 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1587 type but the representation of the unconstrained array. */
1588 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1589 }
1590
1591 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1592 finish constructing the record or union type. If REP_LEVEL is zero, this
1593 record has no representation clause and so will be entirely laid out here.
1594 If REP_LEVEL is one, this record has a representation clause and has been
1595 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1596 this record is derived from a parent record and thus inherits its layout;
1597 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1598 we need to write debug information about this type. */
1599
1600 void
1601 finish_record_type (tree record_type, tree field_list, int rep_level,
1602 bool debug_info_p)
1603 {
1604 enum tree_code code = TREE_CODE (record_type);
1605 tree name = TYPE_IDENTIFIER (record_type);
1606 tree ada_size = bitsize_zero_node;
1607 tree size = bitsize_zero_node;
1608 bool had_size = TYPE_SIZE (record_type) != 0;
1609 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1610 bool had_align = TYPE_ALIGN (record_type) != 0;
1611 tree field;
1612
1613 TYPE_FIELDS (record_type) = field_list;
1614
1615 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1616 generate debug info and have a parallel type. */
1617 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1618
1619 /* Globally initialize the record first. If this is a rep'ed record,
1620 that just means some initializations; otherwise, lay out the record. */
1621 if (rep_level > 0)
1622 {
1623 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
1624
1625 if (!had_size_unit)
1626 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1627
1628 if (!had_size)
1629 TYPE_SIZE (record_type) = bitsize_zero_node;
1630
1631 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1632 out just like a UNION_TYPE, since the size will be fixed. */
1633 else if (code == QUAL_UNION_TYPE)
1634 code = UNION_TYPE;
1635 }
1636 else
1637 {
1638 /* Ensure there isn't a size already set. There can be in an error
1639 case where there is a rep clause but all fields have errors and
1640 no longer have a position. */
1641 TYPE_SIZE (record_type) = 0;
1642
1643 /* Ensure we use the traditional GCC layout for bitfields when we need
1644 to pack the record type or have a representation clause. The other
1645 possible layout (Microsoft C compiler), if available, would prevent
1646 efficient packing in almost all cases. */
1647 #ifdef TARGET_MS_BITFIELD_LAYOUT
1648 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1649 decl_attributes (&record_type,
1650 tree_cons (get_identifier ("gcc_struct"),
1651 NULL_TREE, NULL_TREE),
1652 ATTR_FLAG_TYPE_IN_PLACE);
1653 #endif
1654
1655 layout_type (record_type);
1656 }
1657
1658 /* At this point, the position and size of each field is known. It was
1659 either set before entry by a rep clause, or by laying out the type above.
1660
1661 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1662 to compute the Ada size; the GCC size and alignment (for rep'ed records
1663 that are not padding types); and the mode (for rep'ed records). We also
1664 clear the DECL_BIT_FIELD indication for the cases we know have not been
1665 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1666
1667 if (code == QUAL_UNION_TYPE)
1668 field_list = nreverse (field_list);
1669
1670 for (field = field_list; field; field = DECL_CHAIN (field))
1671 {
1672 tree type = TREE_TYPE (field);
1673 tree pos = bit_position (field);
1674 tree this_size = DECL_SIZE (field);
1675 tree this_ada_size;
1676
1677 if (RECORD_OR_UNION_TYPE_P (type)
1678 && !TYPE_FAT_POINTER_P (type)
1679 && !TYPE_CONTAINS_TEMPLATE_P (type)
1680 && TYPE_ADA_SIZE (type))
1681 this_ada_size = TYPE_ADA_SIZE (type);
1682 else
1683 this_ada_size = this_size;
1684
1685 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1686 if (DECL_BIT_FIELD (field)
1687 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1688 {
1689 unsigned int align = TYPE_ALIGN (type);
1690
1691 /* In the general case, type alignment is required. */
1692 if (value_factor_p (pos, align))
1693 {
1694 /* The enclosing record type must be sufficiently aligned.
1695 Otherwise, if no alignment was specified for it and it
1696 has been laid out already, bump its alignment to the
1697 desired one if this is compatible with its size and
1698 maximum alignment, if any. */
1699 if (TYPE_ALIGN (record_type) >= align)
1700 {
1701 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1702 DECL_BIT_FIELD (field) = 0;
1703 }
1704 else if (!had_align
1705 && rep_level == 0
1706 && value_factor_p (TYPE_SIZE (record_type), align)
1707 && (!TYPE_MAX_ALIGN (record_type)
1708 || TYPE_MAX_ALIGN (record_type) >= align))
1709 {
1710 TYPE_ALIGN (record_type) = align;
1711 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1712 DECL_BIT_FIELD (field) = 0;
1713 }
1714 }
1715
1716 /* In the non-strict alignment case, only byte alignment is required. */
1717 if (!STRICT_ALIGNMENT
1718 && DECL_BIT_FIELD (field)
1719 && value_factor_p (pos, BITS_PER_UNIT))
1720 DECL_BIT_FIELD (field) = 0;
1721 }
1722
1723 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1724 field is technically not addressable. Except that it can actually
1725 be addressed if it is BLKmode and happens to be properly aligned. */
1726 if (DECL_BIT_FIELD (field)
1727 && !(DECL_MODE (field) == BLKmode
1728 && value_factor_p (pos, BITS_PER_UNIT)))
1729 DECL_NONADDRESSABLE_P (field) = 1;
1730
1731 /* A type must be as aligned as its most aligned field that is not
1732 a bit-field. But this is already enforced by layout_type. */
1733 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1734 TYPE_ALIGN (record_type)
1735 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1736
1737 switch (code)
1738 {
1739 case UNION_TYPE:
1740 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1741 size = size_binop (MAX_EXPR, size, this_size);
1742 break;
1743
1744 case QUAL_UNION_TYPE:
1745 ada_size
1746 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1747 this_ada_size, ada_size);
1748 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1749 this_size, size);
1750 break;
1751
1752 case RECORD_TYPE:
1753 /* Since we know here that all fields are sorted in order of
1754 increasing bit position, the size of the record is one
1755 higher than the ending bit of the last field processed
1756 unless we have a rep clause, since in that case we might
1757 have a field outside a QUAL_UNION_TYPE that has a higher ending
1758 position. So use a MAX in that case. Also, if this field is a
1759 QUAL_UNION_TYPE, we need to take into account the previous size in
1760 the case of empty variants. */
1761 ada_size
1762 = merge_sizes (ada_size, pos, this_ada_size,
1763 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1764 size
1765 = merge_sizes (size, pos, this_size,
1766 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1767 break;
1768
1769 default:
1770 gcc_unreachable ();
1771 }
1772 }
1773
1774 if (code == QUAL_UNION_TYPE)
1775 nreverse (field_list);
1776
1777 if (rep_level < 2)
1778 {
1779 /* If this is a padding record, we never want to make the size smaller
1780 than what was specified in it, if any. */
1781 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1782 size = TYPE_SIZE (record_type);
1783
1784 /* Now set any of the values we've just computed that apply. */
1785 if (!TYPE_FAT_POINTER_P (record_type)
1786 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1787 SET_TYPE_ADA_SIZE (record_type, ada_size);
1788
1789 if (rep_level > 0)
1790 {
1791 tree size_unit = had_size_unit
1792 ? TYPE_SIZE_UNIT (record_type)
1793 : convert (sizetype,
1794 size_binop (CEIL_DIV_EXPR, size,
1795 bitsize_unit_node));
1796 unsigned int align = TYPE_ALIGN (record_type);
1797
1798 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1799 TYPE_SIZE_UNIT (record_type)
1800 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1801
1802 compute_record_mode (record_type);
1803 }
1804 }
1805
1806 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1807 TYPE_MAX_ALIGN (record_type) = 0;
1808
1809 if (debug_info_p)
1810 rest_of_record_type_compilation (record_type);
1811 }
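
/* E.g. a hypothetical sketch: for a record type without representation
   clause, the typical call is

     finish_record_type (gnu_type, gnu_field_list, 0, debug_info_p);

   and the record is entirely laid out here, whereas REP_LEVEL 1 only sets
   the sizes and alignment of an already positioned record and REP_LEVEL 2
   merely finalizes the fields of an inherited layout. */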
1812
1813 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1814 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1815 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1816 moment TYPE will get a context. */
1817
1818 void
1819 add_parallel_type (tree type, tree parallel_type)
1820 {
1821 tree decl = TYPE_STUB_DECL (type);
1822
1823 while (DECL_PARALLEL_TYPE (decl))
1824 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1825
1826 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1827
1828 /* If PARALLEL_TYPE already has a context, we are done. */
1829 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1830 return;
1831
1832 /* Otherwise, try to get one from TYPE's context. */
1833 if (TYPE_CONTEXT (type) != NULL_TREE)
1834 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1835 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1836
1837 /* ... otherwise TYPE has no context yet. We know it eventually will, thanks
1838 to gnat_pushdecl, and then its context will be propagated to PARALLEL_TYPE,
1839 so we have nothing to do in this case. */
1840 }
1841
1842 /* Return true if TYPE has a parallel type. */
1843
1844 static bool
1845 has_parallel_type (tree type)
1846 {
1847 tree decl = TYPE_STUB_DECL (type);
1848
1849 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1850 }
1851
1852 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1853 associated with it. It need not be invoked directly in most cases since
1854 finish_record_type takes care of doing so, but this can be necessary if
1855 a parallel type is to be attached to the record type. */
1856
1857 void
1858 rest_of_record_type_compilation (tree record_type)
1859 {
1860 bool var_size = false;
1861 tree field;
1862
1863 /* If this is a padded type, the bulk of the debug info has already been
1864 generated for the field's type. */
1865 if (TYPE_IS_PADDING_P (record_type))
1866 return;
1867
1868 /* If the type already has a parallel type (XVS type), then we're done. */
1869 if (has_parallel_type (record_type))
1870 return;
1871
1872 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1873 {
1874 /* We need to make an XVE/XVU record if any field has variable size,
1875 whether or not the record does. For example, if we have a union,
1876 it may be that all fields, rounded up to the alignment, have the
1877 same size, in which case we'll use that size. But the debug
1878 output routines (except Dwarf2) won't be able to output the fields,
1879 so we need to make the special record. */
1880 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1881 /* If a field has a non-constant qualifier, the record will have
1882 variable size too. */
1883 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1884 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1885 {
1886 var_size = true;
1887 break;
1888 }
1889 }
1890
1891 /* If this record type is of variable size, make a parallel record type that
1892 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1893 if (var_size)
1894 {
1895 tree new_record_type
1896 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1897 ? UNION_TYPE : TREE_CODE (record_type));
1898 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
1899 tree last_pos = bitsize_zero_node;
1900 tree old_field, prev_old_field = NULL_TREE;
1901
1902 new_name
1903 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1904 ? "XVU" : "XVE");
1905 TYPE_NAME (new_record_type) = new_name;
1906 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1907 TYPE_STUB_DECL (new_record_type)
1908 = create_type_stub_decl (new_name, new_record_type);
1909 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1910 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1911 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1912 TYPE_SIZE_UNIT (new_record_type)
1913 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1914
1915 /* Now scan all the fields, replacing each field with a new field
1916 corresponding to the new encoding. */
1917 for (old_field = TYPE_FIELDS (record_type); old_field;
1918 old_field = DECL_CHAIN (old_field))
1919 {
1920 tree field_type = TREE_TYPE (old_field);
1921 tree field_name = DECL_NAME (old_field);
1922 tree curpos = bit_position (old_field);
1923 tree pos, new_field;
1924 bool var = false;
1925 unsigned int align = 0;
1926
1927 /* We're going to do some pattern matching below so remove as many
1928 conversions as possible. */
1929 curpos = remove_conversions (curpos, true);
1930
1931 /* See how the position was modified from the last position.
1932
1933 There are two basic cases we support: a value was added
1934 to the last position or the last position was rounded to
1935 a boundary and then something was added. Check for the
1936 first case first. If not, see if there is any evidence
1937 of rounding. If so, round the last position and retry.
1938
1939 If this is a union, the position can be taken as zero. */
1940 if (TREE_CODE (new_record_type) == UNION_TYPE)
1941 pos = bitsize_zero_node;
1942 else
1943 pos = compute_related_constant (curpos, last_pos);
1944
1945 if (!pos
1946 && TREE_CODE (curpos) == MULT_EXPR
1947 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
1948 {
1949 tree offset = TREE_OPERAND (curpos, 0);
1950 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1951 align = scale_by_factor_of (offset, align);
1952 last_pos = round_up (last_pos, align);
1953 pos = compute_related_constant (curpos, last_pos);
1954 }
1955 else if (!pos
1956 && TREE_CODE (curpos) == PLUS_EXPR
1957 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
1958 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1959 && tree_fits_uhwi_p
1960 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
1961 {
1962 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1963 unsigned HOST_WIDE_INT addend
1964 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1965 align
1966 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
1967 align = scale_by_factor_of (offset, align);
1968 align = MIN (align, addend & -addend);
1969 last_pos = round_up (last_pos, align);
1970 pos = compute_related_constant (curpos, last_pos);
1971 }
1972 else if (potential_alignment_gap (prev_old_field, old_field, pos))
1973 {
1974 align = TYPE_ALIGN (field_type);
1975 last_pos = round_up (last_pos, align);
1976 pos = compute_related_constant (curpos, last_pos);
1977 }
1978
1979 /* If we can't compute a position, set it to zero.
1980
1981 ??? We really should abort here, but it's too much work
1982 to get this correct for all cases. */
1983 if (!pos)
1984 pos = bitsize_zero_node;
1985
1986 /* See if this type is variable-sized and make a pointer type
1987 and indicate the indirection if so. Beware that the debug
1988 back-end may adjust the position computed above according
1989 to the alignment of the field type, i.e. the pointer type
1990 in this case, if we don't preventively counter that. */
1991 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1992 {
1993 field_type = build_pointer_type (field_type);
1994 if (align != 0 && TYPE_ALIGN (field_type) > align)
1995 {
1996 field_type = copy_node (field_type);
1997 TYPE_ALIGN (field_type) = align;
1998 }
1999 var = true;
2000 }
2001
2002 /* Make a new field name, if necessary. */
2003 if (var || align != 0)
2004 {
2005 char suffix[16];
2006
2007 if (align != 0)
2008 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2009 align / BITS_PER_UNIT);
2010 else
2011 strcpy (suffix, "XVL");
2012
2013 field_name = concat_name (field_name, suffix);
2014 }
2015
2016 new_field
2017 = create_field_decl (field_name, field_type, new_record_type,
2018 DECL_SIZE (old_field), pos, 0, 0);
2019 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2020 TYPE_FIELDS (new_record_type) = new_field;
2021
2022 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2023 zero. The only time it's not the last field of the record
2024 is when there are other components at fixed positions after
2025 it (meaning there was a rep clause for every field) and we
2026 want to be able to encode them. */
2027 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
2028 (TREE_CODE (TREE_TYPE (old_field))
2029 == QUAL_UNION_TYPE)
2030 ? bitsize_zero_node
2031 : DECL_SIZE (old_field));
2032 prev_old_field = old_field;
2033 }
2034
2035 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2036
2037 add_parallel_type (record_type, new_record_type);
2038 }
2039 }
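
/* E.g. for a hypothetical record with a variable-sized array field F, the
   code above builds a parallel XVE record in which F is replaced by a field
   of pointer type whose name carries the XVL suffix, following the encodings
   documented in exp_dbug.ads. */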
2040
2041 /* Utility function of above to merge LAST_SIZE, the previous size of a record,
2042 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2043 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2044 replace a value of zero with the old size. If HAS_REP is true, we take the
2045 MAX of the end position of this field with LAST_SIZE. In all other cases,
2046 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2047
2048 static tree
2049 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2050 bool has_rep)
2051 {
2052 tree type = TREE_TYPE (last_size);
2053 tree new_size;
2054
2055 if (!special || TREE_CODE (size) != COND_EXPR)
2056 {
2057 new_size = size_binop (PLUS_EXPR, first_bit, size);
2058 if (has_rep)
2059 new_size = size_binop (MAX_EXPR, last_size, new_size);
2060 }
2061
2062 else
2063 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2064 integer_zerop (TREE_OPERAND (size, 1))
2065 ? last_size : merge_sizes (last_size, first_bit,
2066 TREE_OPERAND (size, 1),
2067 1, has_rep),
2068 integer_zerop (TREE_OPERAND (size, 2))
2069 ? last_size : merge_sizes (last_size, first_bit,
2070 TREE_OPERAND (size, 2),
2071 1, has_rep));
2072
2073 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2074 when fed through substitute_in_expr) into thinking that a constant
2075 size is not constant. */
2076 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2077 new_size = TREE_OPERAND (new_size, 0);
2078
2079 return new_size;
2080 }
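
/* E.g. with HAS_REP true and SPECIAL false, a field of 16 bits placed at
   bit 32 gives

     merge_sizes (last_size, bitsize_int (32), bitsize_int (16), false, true)

   i.e. the MAX of LAST_SIZE and the 48-bit end position, whereas without a
   rep clause the result would simply be the 48-bit end position. */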
2081
2082 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2083 related by the addition of a constant. Return that constant if so. */
2084
2085 static tree
2086 compute_related_constant (tree op0, tree op1)
2087 {
2088 tree op0_var, op1_var;
2089 tree op0_con = split_plus (op0, &op0_var);
2090 tree op1_con = split_plus (op1, &op1_var);
2091 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2092
2093 if (operand_equal_p (op0_var, op1_var, 0))
2094 return result;
2095 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2096 return result;
2097 else
2098 return 0;
2099 }
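
/* E.g. if OP0 is X + 48 and OP1 is X + 16, both in bitsizetype, the function
   returns 32; if the variable parts cannot be matched up, it returns 0 and
   the caller must cope with the failure. */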
2100
2101 /* Utility function of above to split a tree OP which may be a sum, into a
2102 constant part, which is returned, and a variable part, which is stored
2103 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2104 bitsizetype. */
2105
2106 static tree
2107 split_plus (tree in, tree *pvar)
2108 {
2109 /* Strip conversions in order to ease the tree traversal and maximize the
2110 potential for constant or plus/minus discovery. We need to be careful
2111 to always return and set *pvar to bitsizetype trees, but it's worth
2112 the effort. */
2113 in = remove_conversions (in, false);
2114
2115 *pvar = convert (bitsizetype, in);
2116
2117 if (TREE_CODE (in) == INTEGER_CST)
2118 {
2119 *pvar = bitsize_zero_node;
2120 return convert (bitsizetype, in);
2121 }
2122 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2123 {
2124 tree lhs_var, rhs_var;
2125 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2126 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2127
2128 if (lhs_var == TREE_OPERAND (in, 0)
2129 && rhs_var == TREE_OPERAND (in, 1))
2130 return bitsize_zero_node;
2131
2132 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2133 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2134 }
2135 else
2136 return bitsize_zero_node;
2137 }
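
/* E.g. splitting (X + 4) - Y returns 4 and stores X - Y in *PVAR, while
   splitting a plain INTEGER_CST returns the constant itself and stores
   bitsize_zero_node in *PVAR. */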
2138 \f
2139 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2140 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2141 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2142 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2143 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2144 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2145 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2146 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2147 invisible reference. */
2148
2149 tree
2150 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
2151 bool return_unconstrained_p, bool return_by_direct_ref_p,
2152 bool return_by_invisi_ref_p)
2153 {
2154 /* A list of the data type nodes of the subprogram formal parameters.
2155 This list is generated by traversing the input list of PARM_DECL
2156 nodes. */
2157 vec<tree, va_gc> *param_type_list = NULL;
2158 tree t, type;
2159
2160 for (t = param_decl_list; t; t = DECL_CHAIN (t))
2161 vec_safe_push (param_type_list, TREE_TYPE (t));
2162
2163 type = build_function_type_vec (return_type, param_type_list);
2164
2165 /* TYPE may have been shared since GCC hashes types. If it has a different
2166 CICO_LIST, make a copy. Likewise for the various flags. */
2167 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2168 return_by_direct_ref_p, return_by_invisi_ref_p))
2169 {
2170 type = copy_type (type);
2171 TYPE_CI_CO_LIST (type) = cico_list;
2172 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2173 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2174 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2175 }
2176
2177 return type;
2178 }
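
/* E.g. a hypothetical sketch for a function returning a plain integer with
   no copy-in/copy-out parameters:

     tree gnu_fntype
       = create_subprog_type (integer_type_node, gnu_param_list, NULL_TREE,
                              false, false, false);

   Beware that the result may be shared with other subprograms that have the
   same profile, hence the copy made above when the flags differ. */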
2179 \f
2180 /* Return a copy of TYPE but safe to modify in any way. */
2181
2182 tree
2183 copy_type (tree type)
2184 {
2185 tree new_type = copy_node (type);
2186
2187 /* Unshare the language-specific data. */
2188 if (TYPE_LANG_SPECIFIC (type))
2189 {
2190 TYPE_LANG_SPECIFIC (new_type) = NULL;
2191 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2192 }
2193
2194 /* And the contents of the language-specific slot if needed. */
2195 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2196 && TYPE_RM_VALUES (type))
2197 {
2198 TYPE_RM_VALUES (new_type) = NULL_TREE;
2199 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2200 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2201 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2202 }
2203
2204 /* copy_node clears this field instead of copying it, because it is
2205 aliased with TREE_CHAIN. */
2206 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2207
2208 TYPE_POINTER_TO (new_type) = 0;
2209 TYPE_REFERENCE_TO (new_type) = 0;
2210 TYPE_MAIN_VARIANT (new_type) = new_type;
2211 TYPE_NEXT_VARIANT (new_type) = 0;
2212 TYPE_CANONICAL (new_type) = new_type;
2213
2214 return new_type;
2215 }
2216 \f
2217 /* Return a subtype of sizetype with range MIN to MAX and whose
2218 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2219 of the associated TYPE_DECL. */
2220
2221 tree
2222 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2223 {
2224 /* First build a type for the desired range. */
2225 tree type = build_nonshared_range_type (sizetype, min, max);
2226
2227 /* Then set the index type. */
2228 SET_TYPE_INDEX_TYPE (type, index);
2229 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2230
2231 return type;
2232 }
2233
2234 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2235 sizetype is used. */
2236
2237 tree
2238 create_range_type (tree type, tree min, tree max)
2239 {
2240 tree range_type;
2241
2242 if (type == NULL_TREE)
2243 type = sizetype;
2244
2245 /* First build a type with the base range. */
2246 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2247 TYPE_MAX_VALUE (type));
2248
2249 /* Then set the actual range. */
2250 SET_TYPE_RM_MIN_VALUE (range_type, min);
2251 SET_TYPE_RM_MAX_VALUE (range_type, max);
2252
2253 return range_type;
2254 }
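
/* E.g. a minimal sketch:

     tree gnu_range
       = create_range_type (NULL_TREE, size_int (1), size_int (10));

   builds a subtype of sizetype whose base range is that of sizetype but
   whose RM bounds are 1 .. 10. */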
2255 \f
2256 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2257 NAME gives the name of the type to be used in the declaration. */
2258
2259 tree
2260 create_type_stub_decl (tree name, tree type)
2261 {
2262 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2263 DECL_ARTIFICIAL (type_decl) = 1;
2264 TYPE_ARTIFICIAL (type) = 1;
2265 return type_decl;
2266 }
2267
2268 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2269 used in the declaration. ARTIFICIAL_P is true if the declaration was
2270 generated by the compiler. DEBUG_INFO_P is true if we need to write
2271 debug information about this type. GNAT_NODE is used for the position
2272 of the decl. */
2273
2274 tree
2275 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2276 Node_Id gnat_node)
2277 {
2278 enum tree_code code = TREE_CODE (type);
2279 bool is_named
2280 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2281 tree type_decl;
2282
2283 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2284 gcc_assert (!TYPE_IS_DUMMY_P (type));
2285
2286 /* If the type hasn't been named yet, we're naming it; preserve an existing
2287 TYPE_STUB_DECL that has been attached to it for some purpose. */
2288 if (!is_named && TYPE_STUB_DECL (type))
2289 {
2290 type_decl = TYPE_STUB_DECL (type);
2291 DECL_NAME (type_decl) = name;
2292 }
2293 else
2294 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2295
2296 DECL_ARTIFICIAL (type_decl) = artificial_p;
2297 TYPE_ARTIFICIAL (type) = artificial_p;
2298
2299 /* Add this decl to the current binding level. */
2300 gnat_pushdecl (type_decl, gnat_node);
2301
2302 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2303 causes the name to be also viewed as a "tag" by the debug back-end, with
2304 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2305 types in DWARF.
2306
2307 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2308 from multiple contexts, and "type_decl" references a copy of it: in such a
2309 case, do not mess with TYPE_STUB_DECL: we do not want to reuse the TYPE_DECL
2310 with the mechanism above. */
2311 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2312 TYPE_STUB_DECL (type) = type_decl;
2313
2314 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2315 back-end doesn't support, and for others if we don't need to. */
2316 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2317 DECL_IGNORED_P (type_decl) = 1;
2318
2319 return type_decl;
2320 }
2321 \f
2322 /* Return a VAR_DECL or CONST_DECL node.
2323
2324 NAME gives the name of the variable. ASM_NAME is its assembler name
2325 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2326 the GCC tree for an optional initial expression; NULL_TREE if none.
2327
2328 CONST_FLAG is true if this variable is constant, in which case we might
2329 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2330
2331 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2332 definition to be made visible outside of the current compilation unit, for
2333 instance variable definitions in a package specification.
2334
2335 EXTERN_FLAG is true when processing an external variable declaration (as
2336 opposed to a definition: no storage is to be allocated for the variable).
2337
2338 STATIC_FLAG is only relevant when not at top level and indicates whether
2339 to always allocate storage to the variable.
2340
2341 VOLATILE_FLAG is true if this variable is declared as volatile.
2342
2343 ARTIFICIAL_P is true if the variable was generated by the compiler.
2344
2345 DEBUG_INFO_P is true if we need to write debug information for it.
2346
2347 GNAT_NODE is used for the position of the decl. */
2348
2349 tree
2350 create_var_decl (tree name, tree asm_name, tree type, tree init,
2351 bool const_flag, bool public_flag, bool extern_flag,
2352 bool static_flag, bool volatile_flag, bool artificial_p,
2353 bool debug_info_p, struct attrib *attr_list,
2354 Node_Id gnat_node, bool const_decl_allowed_p)
2355 {
2356 /* Whether the object has static storage duration, either explicitly or by
2357 virtue of being declared at the global level. */
2358 const bool static_storage = static_flag || global_bindings_p ();
2359
2360 /* Whether the initializer is constant: for an external object or an object
2361 with static storage duration, we check that the initializer is a valid
2362 constant expression for initializing a static variable; otherwise, we
2363 only check that it is constant. */
2364 const bool init_const
2365 = (init
2366 && gnat_types_compatible_p (type, TREE_TYPE (init))
2367 && (extern_flag || static_storage
2368 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2369 != NULL_TREE
2370 : TREE_CONSTANT (init)));
2371
2372 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2373 case the initializer may be used in lieu of the DECL node (as done in
2374 Identifier_to_gnu). This is useful to prevent the need of elaboration
2375 code when an identifier for which such a DECL is made is in turn used
2376 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2377 but extra constraints apply to this choice (see below) and they are not
2378 relevant to the distinction we wish to make. */
2379 const bool constant_p = const_flag && init_const;
2380
2381 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2382 and may be used for scalars in general but not for aggregates. */
2383 tree var_decl
2384 = build_decl (input_location,
2385 (constant_p && const_decl_allowed_p
2386 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2387 name, type);
2388
2389 /* If this is external, throw away any initializations (they will be done
2390 elsewhere) unless this is a constant for which we would like to remain
2391 able to get the initializer. If we are defining a global here, leave a
2392 constant initialization and save any variable elaborations for the
2393 elaboration routine. If we are just annotating types, throw away the
2394 initialization if it isn't a constant. */
2395 if ((extern_flag && !constant_p)
2396 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2397 init = NULL_TREE;
2398
2399 /* At the global level, a non-constant initializer generates elaboration
2400 statements. Check that such statements are allowed, that is to say,
2401 not violating a No_Elaboration_Code restriction. */
2402 if (init && !init_const && global_bindings_p ())
2403 Check_Elaboration_Code_Allowed (gnat_node);
2404
2405 /* Attach the initializer, if any. */
2406 DECL_INITIAL (var_decl) = init;
2407
2408 /* Directly set some flags. */
2409 DECL_ARTIFICIAL (var_decl) = artificial_p;
2410 DECL_EXTERNAL (var_decl) = extern_flag;
2411
2412 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2413 try to fiddle with DECL_COMMON. However, on platforms that don't
2414 support global BSS sections, uninitialized global variables would
2415 go in DATA instead, thus increasing the size of the executable. */
2416 if (!flag_no_common
2417 && TREE_CODE (var_decl) == VAR_DECL
2418 && TREE_PUBLIC (var_decl)
2419 && !have_global_bss_p ())
2420 DECL_COMMON (var_decl) = 1;
2421
2422 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2423 since we will create an associated variable. Likewise for an external
2424 constant whose initializer is not absolute, because this would mean a
2425 global relocation in a read-only section which runs afoul of the PE-COFF
2426 run-time relocation mechanism. */
2427 if (!debug_info_p
2428 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2429 || (extern_flag
2430 && constant_p
2431 && init
2432 && initializer_constant_valid_p (init, TREE_TYPE (init))
2433 != null_pointer_node))
2434 DECL_IGNORED_P (var_decl) = 1;
2435
2436 TREE_CONSTANT (var_decl) = constant_p;
2437 TREE_READONLY (var_decl) = const_flag;
2438
2439 /* The object is public if it is external or if it is declared public
2440 and has static storage duration. */
2441 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2442
2443 /* We need to allocate static storage for an object with static storage
2444 duration if it isn't external. */
2445 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2446
2447 TREE_SIDE_EFFECTS (var_decl)
2448 = TREE_THIS_VOLATILE (var_decl)
2449 = TYPE_VOLATILE (type) | volatile_flag;
2450
2451 if (TREE_SIDE_EFFECTS (var_decl))
2452 TREE_ADDRESSABLE (var_decl) = 1;
2453
2454 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2455 if (TREE_CODE (var_decl) == VAR_DECL)
2456 process_attributes (&var_decl, &attr_list, true, gnat_node);
2457
2458 /* Add this decl to the current binding level. */
2459 gnat_pushdecl (var_decl, gnat_node);
2460
2461 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2462 {
2463 /* Let the target mangle the name if this isn't a verbatim asm. */
2464 if (*IDENTIFIER_POINTER (asm_name) != '*')
2465 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2466
2467 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2468 }
2469
2470 return var_decl;
2471 }
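
/* E.g. a hypothetical sketch: defining an initialized constant visible
   outside the current compilation unit could look like

     tree gnu_decl
       = create_var_decl (gnu_name, NULL_TREE, gnu_type, gnu_init,
                          true, true, false, false, false, false, true,
                          NULL, gnat_entity, true);

   which yields a CONST_DECL if GNU_INIT is a valid static initializer and
   GNU_TYPE is not an aggregate type, and a TREE_READONLY VAR_DECL
   otherwise. */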
2472 \f
2473 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2474
2475 static bool
2476 aggregate_type_contains_array_p (tree type)
2477 {
2478 switch (TREE_CODE (type))
2479 {
2480 case RECORD_TYPE:
2481 case UNION_TYPE:
2482 case QUAL_UNION_TYPE:
2483 {
2484 tree field;
2485 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2486 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2487 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2488 return true;
2489 return false;
2490 }
2491
2492 case ARRAY_TYPE:
2493 return true;
2494
2495 default:
2496 gcc_unreachable ();
2497 }
2498 }
2499
2500 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2501 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2502 is the specified size of the field. If POS is nonzero, it is the bit
2503 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2504 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2505 means we are allowed to take the address of the field; if it is negative,
2506 we should not make a bitfield, which is used by make_aligning_type. */
2507
2508 tree
2509 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2510 int packed, int addressable)
2511 {
2512 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2513
2514 DECL_CONTEXT (field_decl) = record_type;
2515 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2516
2517 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2518 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2519 Likewise for an aggregate without specified position that contains an
2520 array, because in this case slices of variable length of this array
2521 must be handled by GCC and variable-sized objects need to be aligned
2522 to at least a byte boundary. */
2523 if (packed && (TYPE_MODE (type) == BLKmode
2524 || (!pos
2525 && AGGREGATE_TYPE_P (type)
2526 && aggregate_type_contains_array_p (type))))
2527 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2528
2529 /* If a size is specified, use it. Otherwise, if the record type is packed
2530 compute a size to use, which may differ from the object's natural size.
2531 We always set a size in this case to trigger the checks for bitfield
2532 creation below, which is typically required when no position has been
2533 specified. */
2534 if (size)
2535 size = convert (bitsizetype, size);
2536 else if (packed == 1)
2537 {
2538 size = rm_size (type);
2539 if (TYPE_MODE (type) == BLKmode)
2540 size = round_up (size, BITS_PER_UNIT);
2541 }
2542
2543 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2544 specified for two reasons: first, if the size differs from the natural
2545 size; second, if the alignment is insufficient. There are a number of
2546 ways the latter can be true.
2547
2548 We never make a bitfield if the type of the field has a nonconstant size,
2549 because no such entity requiring bitfield operations should reach here.
2550
2551 We do *preventively* make a bitfield when there might be a need for it
2552 but we don't have all the necessary information to decide, as is the case
2553 of a field with no specified position in a packed record.
2554
2555 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2556 in layout_decl or finish_record_type to clear the bit_field indication if
2557 it is in fact not needed. */
2558 if (addressable >= 0
2559 && size
2560 && TREE_CODE (size) == INTEGER_CST
2561 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2562 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2563 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2564 || packed
2565 || (TYPE_ALIGN (record_type) != 0
2566 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2567 {
2568 DECL_BIT_FIELD (field_decl) = 1;
2569 DECL_SIZE (field_decl) = size;
2570 if (!packed && !pos)
2571 {
2572 if (TYPE_ALIGN (record_type) != 0
2573 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2574 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2575 else
2576 DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
2577 }
2578 }
2579
2580 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2581
2582 /* Bump the alignment if need be, either for bitfield/packing purposes or
2583 to satisfy the type requirements if no such consideration applies. When
2584 we get the alignment from the type, indicate if this is from an explicit
2585 user request, which prevents stor-layout from lowering it later on. */
2586 {
2587 unsigned int bit_align
2588 = (DECL_BIT_FIELD (field_decl) ? 1
2589 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2590
2591 if (bit_align > DECL_ALIGN (field_decl))
2592 DECL_ALIGN (field_decl) = bit_align;
2593 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2594 {
2595 DECL_ALIGN (field_decl) = TYPE_ALIGN (type);
2596 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2597 }
2598 }
2599
2600 if (pos)
2601 {
2602 /* We need to pass in the alignment the DECL is known to have.
2603 This is the lowest-order bit set in POS, but no more than
2604 the alignment of the record, if one is specified. Note
2605 that an alignment of 0 is taken as infinite. */
2606 unsigned int known_align;
2607
2608 if (tree_fits_uhwi_p (pos))
2609 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2610 else
2611 known_align = BITS_PER_UNIT;
2612
2613 if (TYPE_ALIGN (record_type)
2614 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2615 known_align = TYPE_ALIGN (record_type);
2616
2617 layout_decl (field_decl, known_align);
2618 SET_DECL_OFFSET_ALIGN (field_decl,
2619 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2620 : BITS_PER_UNIT);
2621 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2622 &DECL_FIELD_BIT_OFFSET (field_decl),
2623 DECL_OFFSET_ALIGN (field_decl), pos);
2624 }
2625
2626 /* In addition to what our caller says, claim the field is addressable if we
2627 know that its type is not suitable.
2628
2629 The field may also be "technically" nonaddressable, meaning that even if
2630 we attempt to take the field's address we will actually get the address
2631 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2632 value we have at this point is not accurate enough, so we don't account
2633 for this here and let finish_record_type decide. */
2634 if (!addressable && !type_for_nonaliased_component_p (type))
2635 addressable = 1;
2636
2637 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2638
2639 return field_decl;
2640 }
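
/* E.g. a hypothetical sketch: a 5-bit component placed at bit 3 of a packed
   record would be created with

     gnu_field = create_field_decl (gnu_name, gnu_type, gnu_record_type,
                                    bitsize_int (5), bitsize_int (3), 1, 0);

   and typically ends up with DECL_BIT_FIELD set, since its size differs from
   the natural size of its type and its position is not type-aligned. */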
2641 \f
2642 /* Return a PARM_DECL node. NAME is the name of the parameter and TYPE is
2643 its type. READONLY is true if the parameter is readonly (either an In
2644 parameter or an address of a pass-by-ref parameter). */
2645
2646 tree
2647 create_param_decl (tree name, tree type, bool readonly)
2648 {
2649 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2650
2651 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2652 can lead to various ABI violations. */
2653 if (targetm.calls.promote_prototypes (NULL_TREE)
2654 && INTEGRAL_TYPE_P (type)
2655 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2656 {
2657 /* We have to be careful about biased types here. Make a subtype
2658 of integer_type_node with the proper biasing. */
2659 if (TREE_CODE (type) == INTEGER_TYPE
2660 && TYPE_BIASED_REPRESENTATION_P (type))
2661 {
2662 tree subtype
2663 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2664 TREE_TYPE (subtype) = integer_type_node;
2665 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2666 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2667 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2668 type = subtype;
2669 }
2670 else
2671 type = integer_type_node;
2672 }
2673
2674 DECL_ARG_TYPE (param_decl) = type;
2675 TREE_READONLY (param_decl) = readonly;
2676 return param_decl;
2677 }
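
/* E.g. for a hypothetical biased 8-bit parameter type with bounds 100 .. 227,
   the code above builds a biased subtype of integer_type_node carrying the
   same RM bounds, so that the bias survives the ABI-mandated promotion to the
   precision of int. */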
2678 \f
2679 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2680 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2681 changed. GNAT_NODE is used for the position of error messages. */
2682
2683 void
2684 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2685 Node_Id gnat_node)
2686 {
2687 struct attrib *attr;
2688
2689 for (attr = *attr_list; attr; attr = attr->next)
2690 switch (attr->type)
2691 {
2692 case ATTR_MACHINE_ATTRIBUTE:
2693 Sloc_to_locus (Sloc (gnat_node), &input_location);
2694 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2695 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2696 break;
2697
2698 case ATTR_LINK_ALIAS:
2699 if (!DECL_EXTERNAL (*node))
2700 {
2701 TREE_STATIC (*node) = 1;
2702 assemble_alias (*node, attr->name);
2703 }
2704 break;
2705
2706 case ATTR_WEAK_EXTERNAL:
2707 if (SUPPORTS_WEAK)
2708 declare_weak (*node);
2709 else
2710 post_error ("?weak declarations not supported on this target",
2711 attr->error_point);
2712 break;
2713
2714 case ATTR_LINK_SECTION:
2715 if (targetm_common.have_named_sections)
2716 {
2717 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2718 DECL_COMMON (*node) = 0;
2719 }
2720 else
2721 post_error ("?section attributes are not supported for this target",
2722 attr->error_point);
2723 break;
2724
2725 case ATTR_LINK_CONSTRUCTOR:
2726 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2727 TREE_USED (*node) = 1;
2728 break;
2729
2730 case ATTR_LINK_DESTRUCTOR:
2731 DECL_STATIC_DESTRUCTOR (*node) = 1;
2732 TREE_USED (*node) = 1;
2733 break;
2734
2735 case ATTR_THREAD_LOCAL_STORAGE:
2736 set_decl_tls_model (*node, decl_default_tls_model (*node));
2737 DECL_COMMON (*node) = 0;
2738 break;
2739 }
2740
2741 *attr_list = NULL;
2742 }
2743
2744 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2745 a power of 2. */
2746
2747 bool
2748 value_factor_p (tree value, HOST_WIDE_INT factor)
2749 {
2750 if (tree_fits_uhwi_p (value))
2751 return tree_to_uhwi (value) % factor == 0;
2752
2753 if (TREE_CODE (value) == MULT_EXPR)
2754 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2755 || value_factor_p (TREE_OPERAND (value, 1), factor));
2756
2757 return false;
2758 }
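
/* E.g. value_factor_p (bitsize_int (24), 8) is true, as is the case for a
   MULT_EXPR one operand of which is a multiple of FACTOR. A false result
   only means that the property could not be proved. */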
2759
2760 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2761 from the parameter association for the instantiation of a generic. We do
2762 not want to emit source locations for them: the code generated for their
2763 initialization is likely to disturb debugging. */
2764
2765 bool
2766 renaming_from_generic_instantiation_p (Node_Id gnat_node)
2767 {
2768 if (Nkind (gnat_node) != N_Defining_Identifier
2769 || !IN (Ekind (gnat_node), Object_Kind)
2770 || Comes_From_Source (gnat_node)
2771 || !Present (Renamed_Object (gnat_node)))
2772 return false;
2773
2774 /* Get the object declaration of the renamed object, if any and if the
2775 renamed object is a mere identifier. */
2776 gnat_node = Renamed_Object (gnat_node);
2777 if (Nkind (gnat_node) != N_Identifier)
2778 return false;
2779
2780 gnat_node = Entity (gnat_node);
2781 if (!Present (Parent (gnat_node)))
2782 return false;
2783
2784 gnat_node = Parent (gnat_node);
2785 return
2786 (Present (gnat_node)
2787 && Nkind (gnat_node) == N_Object_Declaration
2788 && Present (Corresponding_Generic_Association (gnat_node)));
2789 }
2790
2791 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2792 feed it with the elaboration of GNAT_SCOPE. */
2793
2794 static struct deferred_decl_context_node *
2795 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2796 {
2797 struct deferred_decl_context_node *new_node;
2798
2799 new_node
2800 = (struct deferred_decl_context_node *) xmalloc (sizeof (*new_node));
2801 new_node->decl = decl;
2802 new_node->gnat_scope = gnat_scope;
2803 new_node->force_global = force_global;
2804 new_node->types.create (1);
2805 new_node->next = deferred_decl_context_queue;
2806 deferred_decl_context_queue = new_node;
2807 return new_node;
2808 }
2809
2810 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2811 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2812 computed. */
2813
2814 static void
2815 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2816 {
2817 n->types.safe_push (type);
2818 }
2819
2820 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2821 NULL_TREE if it is not available. */
2822
2823 static tree
2824 compute_deferred_decl_context (Entity_Id gnat_scope)
2825 {
2826 tree context;
2827
2828 if (present_gnu_tree (gnat_scope))
2829 context = get_gnu_tree (gnat_scope);
2830 else
2831 return NULL_TREE;
2832
2833 if (TREE_CODE (context) == TYPE_DECL)
2834 {
2835 const tree context_type = TREE_TYPE (context);
2836
2837 /* Skip dummy types: only the final ones can appear in the context
2838 chain. */
2839 if (TYPE_DUMMY_P (context_type))
2840 return NULL_TREE;
2841
2842 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2843 chain. */
2844 else
2845 context = context_type;
2846 }
2847
2848 return context;
2849 }
2850
2851 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
2852 that cannot be processed yet and remove the others. If FORCE is true,
2853 force the processing of all nodes, using the global context when nodes
2854 don't have a GNU translation. */
2855
2856 void
2857 process_deferred_decl_context (bool force)
2858 {
2859 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2860 struct deferred_decl_context_node *node;
2861
2862 while (*it != NULL)
2863 {
2864 bool processed = false;
2865 tree context = NULL_TREE;
2866 Entity_Id gnat_scope;
2867
2868 node = *it;
2869
2870 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2871 get the first scope. */
2872 gnat_scope = node->gnat_scope;
2873 while (Present (gnat_scope))
2874 {
2875 context = compute_deferred_decl_context (gnat_scope);
2876 if (!force || context != NULL_TREE)
2877 break;
2878 gnat_scope = get_debug_scope (gnat_scope, NULL);
2879 }
2880
2881 /* Imported declarations must not be in a local context (i.e. not inside
2882 a function). */
2883 if (context != NULL_TREE && node->force_global > 0)
2884 {
2885 tree ctx = context;
2886
2887 while (ctx != NULL_TREE)
2888 {
2889 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
2890 ctx = (DECL_P (ctx))
2891 ? DECL_CONTEXT (ctx)
2892 : TYPE_CONTEXT (ctx);
2893 }
2894 }
2895
2896 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2897 was no elaborated scope, use the global context. */
2898 if (force && context == NULL_TREE)
2899 context = get_global_context ();
2900
2901 if (context != NULL_TREE)
2902 {
2903 tree t;
2904 int i;
2905
2906 DECL_CONTEXT (node->decl) = context;
2907
2908 /* Propagate it to the TYPE_CONTEXT attributes of the requested
2909 ..._TYPE nodes. */
2910 FOR_EACH_VEC_ELT (node->types, i, t)
2911 {
2912 gnat_set_type_context (t, context);
2913 }
2914 processed = true;
2915 }
2916
2917 /* If this node has been successfully processed, remove it from the
2918 queue. Then move to the next node. */
2919 if (processed)
2920 {
2921 *it = node->next;
2922 node->types.release ();
2923 free (node);
2924 }
2925 else
2926 it = &node->next;
2927 }
2928 }
2929
2930
2931 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2932
2933 static unsigned int
2934 scale_by_factor_of (tree expr, unsigned int value)
2935 {
2936 unsigned HOST_WIDE_INT addend = 0;
2937 unsigned HOST_WIDE_INT factor = 1;
2938
2939 /* Peel conversions around EXPR and try to extract bodies from function
2940 calls: it is possible to get the scale factor from size functions. */
2941 expr = remove_conversions (expr, true);
2942 if (TREE_CODE (expr) == CALL_EXPR)
2943 expr = maybe_inline_call_in_expr (expr);
2944
2945 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
2946 multiple of the scale factor we are looking for. */
2947 if (TREE_CODE (expr) == PLUS_EXPR
2948 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
2949 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
2950 {
2951 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2952 expr = TREE_OPERAND (expr, 0);
2953 }
2954
2955 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2956 corresponding to the number of trailing zeros of the mask. */
2957 if (TREE_CODE (expr) == BIT_AND_EXPR
2958 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2959 {
2960 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2961 unsigned int i = 0;
2962
2963 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2964 {
2965 mask >>= 1;
2966 factor *= 2;
2967 i++;
2968 }
2969 }
2970
2971 /* If the addend is not a multiple of the factor we found, give up. In
2972 theory we could find a smaller common factor but it's useless for our
2973 needs. This situation arises when dealing with a field F1 with no
2974 alignment requirement but that is following a field F2 with such
2975 requirements. As long as we have F2's offset, we don't need alignment
2976 information to compute F1's. */
2977 if (addend % factor != 0)
2978 factor = 1;
2979
2980 return factor * value;
2981 }
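
/* E.g. for EXPR equal to (X & ~7) + 16 and VALUE equal to 1, the mask
   contributes a power-of-2 factor of 8 and the addend 16 is a multiple of
   it, so the function returns 8. */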
2982
2983 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2984 unless we can prove these two fields are laid out in such a way that no gap
2985 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2986 is the distance in bits between the end of PREV_FIELD and the starting
2987 position of CURR_FIELD. It is ignored if null. */
2988
2989 static bool
2990 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2991 {
2992 /* If this is the first field of the record, there cannot be any gap. */
2993 if (!prev_field)
2994 return false;
2995
2996 /* If the previous field is a union type, then return false: The only
2997 time when such a field is not the last field of the record is when
2998 there are other components at fixed positions after it (meaning there
2999 was a rep clause for every field), in which case we don't want the
3000 alignment constraint to override them. */
3001 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3002 return false;
3003
3004 /* If the distance between the end of prev_field and the beginning of
3005 curr_field is constant, then there is a gap if the value of this
3006 constant is nonzero. */
3007 if (offset && tree_fits_uhwi_p (offset))
3008 return !integer_zerop (offset);
3009
3010 /* If the size and position of the previous field are constant,
3011 then check the sum of this size and position. There will be a gap
3012 iff it is not a multiple of the current field alignment. */
3013 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3014 && tree_fits_uhwi_p (bit_position (prev_field)))
3015 return ((tree_to_uhwi (bit_position (prev_field))
3016 + tree_to_uhwi (DECL_SIZE (prev_field)))
3017 % DECL_ALIGN (curr_field) != 0);
3018
3019 /* If both the position and size of the previous field are multiples
3020 of the current field alignment, there cannot be any gap. */
3021 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3022 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3023 return false;
3024
3025 /* Fall back to returning that there may be a potential gap. */
3026 return true;
3027 }
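
/* E.g. for a hypothetical PREV_FIELD of 24 bits at bit position 0 and a
   CURR_FIELD with a DECL_ALIGN of 16 bits, 0 + 24 is not a multiple of 16,
   so a gap may exist and the function returns true. */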
3028
3029 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3030 the decl. */
3031
3032 tree
3033 create_label_decl (tree name, Node_Id gnat_node)
3034 {
3035 tree label_decl
3036 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3037
3038 DECL_MODE (label_decl) = VOIDmode;
3039
3040 /* Add this decl to the current binding level. */
3041 gnat_pushdecl (label_decl, gnat_node);
3042
3043 return label_decl;
3044 }
3045 \f
3046 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3047 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3048 the list of its parameters (a list of PARM_DECL nodes chained through the
3049 DECL_CHAIN field).
3050
3051 INLINE_STATUS, CONST_FLAG, PUBLIC_FLAG, EXTERN_FLAG, VOLATILE_FLAG as well
3052 as ATTR_LIST are used to set the appropriate fields in the FUNCTION_DECL.
3053
3054 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3055
3056 DEBUG_INFO_P is true if we need to write debug information for it.
3057
3058 GNAT_NODE is used for the position of the decl. */
3059
3060 tree
3061 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3062 enum inline_status_t inline_status, bool const_flag,
3063 bool public_flag, bool extern_flag, bool volatile_flag,
3064 bool artificial_p, bool debug_info_p,
3065 struct attrib *attr_list, Node_Id gnat_node)
3066 {
3067 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3068 tree result_decl
3069 = build_decl (input_location, RESULT_DECL, NULL_TREE, TREE_TYPE (type));
3070 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3071
3072 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3073 DECL_EXTERNAL (subprog_decl) = extern_flag;
3074
3075 switch (inline_status)
3076 {
3077 case is_suppressed:
3078 DECL_UNINLINABLE (subprog_decl) = 1;
3079 break;
3080
3081 case is_disabled:
3082 break;
3083
3084 case is_required:
3085 if (Back_End_Inlining)
3086 decl_attributes (&subprog_decl,
3087 tree_cons (get_identifier ("always_inline"),
3088 NULL_TREE, NULL_TREE),
3089 ATTR_FLAG_TYPE_IN_PLACE);
3090
3091 /* ... fall through ... */
3092
3093 case is_enabled:
3094 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3095 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3096 break;
3097
3098 default:
3099 gcc_unreachable ();
3100 }
3101
3102 if (!debug_info_p)
3103 DECL_IGNORED_P (subprog_decl) = 1;
3104
3105 TREE_READONLY (subprog_decl) = TYPE_READONLY (type) | const_flag;
3106 TREE_PUBLIC (subprog_decl) = public_flag;
3107 TREE_SIDE_EFFECTS (subprog_decl)
3108 = TREE_THIS_VOLATILE (subprog_decl)
3109 = TYPE_VOLATILE (type) | volatile_flag;
3110
3111 DECL_ARTIFICIAL (result_decl) = 1;
3112 DECL_IGNORED_P (result_decl) = 1;
3113 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3114 DECL_RESULT (subprog_decl) = result_decl;
3115
3116 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3117
3118 /* Add this decl to the current binding level. */
3119 gnat_pushdecl (subprog_decl, gnat_node);
3120
3121 if (asm_name)
3122 {
3123 /* Let the target mangle the name if this isn't a verbatim asm. */
3124 if (*IDENTIFIER_POINTER (asm_name) != '*')
3125 asm_name = targetm.mangle_decl_assembler_name (subprog_decl, asm_name);
3126
3127 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
3128
3129 /* The expand_main_function circuitry expects "main_identifier_node" to
3130 designate the DECL_NAME of the 'main' entry point, in turn expected
3131 to be declared as the "main" function literally by default. Ada
3132 program entry points are typically declared with a different name
3133 within the binder generated file, exported as 'main' to satisfy the
3134 system expectations. Force main_identifier_node in this case. */
3135 if (asm_name == main_identifier_node)
3136 DECL_NAME (subprog_decl) = main_identifier_node;
3137 }
3138
3139 /* Output the assembler code and/or RTL for the declaration. */
3140 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3141
3142 return subprog_decl;
3143 }
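/* Illustrative sketch (assumption): declaring a parameterless imported
   procedure would look along the lines of

     tree ftype = build_function_type_list (void_type_node, NULL_TREE);
     tree fdecl
       = create_subprog_decl (get_identifier ("proc"), NULL_TREE, ftype,
                              NULL_TREE, is_disabled, false, true, true,
                              false, false, true, NULL, gnat_node);

   i.e. public and external, neither const nor volatile nor artificial,
   with debug info requested and no attributes.  */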
3144 \f
3145 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3146 body. This routine needs to be invoked before processing the declarations
3147 appearing in the subprogram. */
3148
3149 void
3150 begin_subprog_body (tree subprog_decl)
3151 {
3152 tree param_decl;
3153
3154 announce_function (subprog_decl);
3155
3156 /* This function is being defined. */
3157 TREE_STATIC (subprog_decl) = 1;
3158
3159 /* The failure of this assertion will likely come from a wrong context for
3160 the subprogram body, e.g. using another procedure's context for a
3161 procedure declared at library level. */
3162 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3163
3164 current_function_decl = subprog_decl;
3165
3166 /* Enter a new binding level and show that all the parameters belong to
3167 this function. */
3168 gnat_pushlevel ();
3169
3170 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3171 param_decl = DECL_CHAIN (param_decl))
3172 DECL_CONTEXT (param_decl) = subprog_decl;
3173
3174 make_decl_rtl (subprog_decl);
3175 }
3176
3177 /* Finish translating the current subprogram and set its BODY. */
3178
3179 void
3180 end_subprog_body (tree body)
3181 {
3182 tree fndecl = current_function_decl;
3183
3184 /* Attach the BLOCK for this level to the function and pop the level. */
3185 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3186 DECL_INITIAL (fndecl) = current_binding_level->block;
3187 gnat_poplevel ();
3188
3189 /* Mark the RESULT_DECL as being in this subprogram. */
3190 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3191
3192 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3193 if (TREE_CODE (body) == BIND_EXPR)
3194 {
3195 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3196 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3197 }
3198
3199 DECL_SAVED_TREE (fndecl) = body;
3200
3201 current_function_decl = decl_function_context (fndecl);
3202 }
3203
3204 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3205
3206 void
3207 rest_of_subprog_body_compilation (tree subprog_decl)
3208 {
3209 /* We cannot track the location of errors past this point. */
3210 error_gnat_node = Empty;
3211
3212 /* If we're only annotating types, don't actually compile this function. */
3213 if (type_annotate_only)
3214 return;
3215
3216 /* Dump functions before gimplification. */
3217 dump_function (TDI_original, subprog_decl);
3218
3219 if (!decl_function_context (subprog_decl))
3220 cgraph_node::finalize_function (subprog_decl, false);
3221 else
3222 /* Register this function with cgraph just far enough to get it
3223 added to our parent's nested function list. */
3224 (void) cgraph_node::get_create (subprog_decl);
3225 }
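/* Illustrative sketch (assumption): the three routines above are meant to
   be called in sequence when translating a subprogram body, e.g.

     begin_subprog_body (gnu_subprog_decl);
     ... translate declarations and statements into GNU_BODY ...
     end_subprog_body (gnu_body);
     rest_of_subprog_body_compilation (gnu_subprog_decl);

   with GNU_BODY typically a BIND_EXPR wrapping the statement list.  */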
3226
3227 tree
3228 gnat_builtin_function (tree decl)
3229 {
3230 gnat_pushdecl (decl, Empty);
3231 return decl;
3232 }
3233
3234 /* Return an integer type with the number of bits of precision given by
3235 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3236 it is a signed type. */
3237
3238 tree
3239 gnat_type_for_size (unsigned precision, int unsignedp)
3240 {
3241 tree t;
3242 char type_name[20];
3243
3244 if (precision <= 2 * MAX_BITS_PER_WORD
3245 && signed_and_unsigned_types[precision][unsignedp])
3246 return signed_and_unsigned_types[precision][unsignedp];
3247
3248 if (unsignedp)
3249 t = make_unsigned_type (precision);
3250 else
3251 t = make_signed_type (precision);
3252
3253 if (precision <= 2 * MAX_BITS_PER_WORD)
3254 signed_and_unsigned_types[precision][unsignedp] = t;
3255
3256 if (!TYPE_NAME (t))
3257 {
3258 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3259 TYPE_NAME (t) = get_identifier (type_name);
3260 }
3261
3262 return t;
3263 }
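/* Illustrative sketch (assumption): results are cached per precision and
   signedness, so

     tree t1 = gnat_type_for_size (24, 1);
     tree t2 = gnat_type_for_size (24, 1);

   return the same UNSIGNED_24 node as long as 24 <= 2 * MAX_BITS_PER_WORD,
   whereas larger precisions get a fresh type on every call.  */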
3264
3265 /* Likewise for floating-point types. */
3266
3267 static tree
3268 float_type_for_precision (int precision, machine_mode mode)
3269 {
3270 tree t;
3271 char type_name[20];
3272
3273 if (float_types[(int) mode])
3274 return float_types[(int) mode];
3275
3276 float_types[(int) mode] = t = make_node (REAL_TYPE);
3277 TYPE_PRECISION (t) = precision;
3278 layout_type (t);
3279
3280 gcc_assert (TYPE_MODE (t) == mode);
3281 if (!TYPE_NAME (t))
3282 {
3283 sprintf (type_name, "FLOAT_%d", precision);
3284 TYPE_NAME (t) = get_identifier (type_name);
3285 }
3286
3287 return t;
3288 }
3289
3290 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3291 an unsigned type; otherwise a signed type is returned. */
3292
3293 tree
3294 gnat_type_for_mode (machine_mode mode, int unsignedp)
3295 {
3296 if (mode == BLKmode)
3297 return NULL_TREE;
3298
3299 if (mode == VOIDmode)
3300 return void_type_node;
3301
3302 if (COMPLEX_MODE_P (mode))
3303 return NULL_TREE;
3304
3305 if (SCALAR_FLOAT_MODE_P (mode))
3306 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3307
3308 if (SCALAR_INT_MODE_P (mode))
3309 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3310
3311 if (VECTOR_MODE_P (mode))
3312 {
3313 machine_mode inner_mode = GET_MODE_INNER (mode);
3314 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3315 if (inner_type)
3316 return build_vector_type_for_mode (inner_type, mode);
3317 }
3318
3319 return NULL_TREE;
3320 }
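/* Illustrative sketch (assumption): for a vector mode such as V4SImode on
   targets that support it, the code above recurses on the inner mode, so

     tree t = gnat_type_for_mode (V4SImode, 0);

   amounts to build_vector_type_for_mode (gnat_type_for_size (32, 0),
   V4SImode) when SImode is 32 bits wide, while BLKmode and complex modes
   yield NULL_TREE.  */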
3321
3322 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
3323
3324 tree
3325 gnat_unsigned_type (tree type_node)
3326 {
3327 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
3328
3329 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3330 {
3331 type = copy_node (type);
3332 TREE_TYPE (type) = type_node;
3333 }
3334 else if (TREE_TYPE (type_node)
3335 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3336 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3337 {
3338 type = copy_node (type);
3339 TREE_TYPE (type) = TREE_TYPE (type_node);
3340 }
3341
3342 return type;
3343 }
3344
3345 /* Return the signed version of a TYPE_NODE, a scalar type. */
3346
3347 tree
3348 gnat_signed_type (tree type_node)
3349 {
3350 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3351
3352 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3353 {
3354 type = copy_node (type);
3355 TREE_TYPE (type) = type_node;
3356 }
3357 else if (TREE_TYPE (type_node)
3358 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3359 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3360 {
3361 type = copy_node (type);
3362 TREE_TYPE (type) = TREE_TYPE (type_node);
3363 }
3364
3365 return type;
3366 }
3367
3368 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3369 transparently converted to each other. */
3370
3371 int
3372 gnat_types_compatible_p (tree t1, tree t2)
3373 {
3374 enum tree_code code;
3375
3376 /* This is the default criterion. */
3377 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3378 return 1;
3379
3380 /* We only check structural equivalence here. */
3381 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3382 return 0;
3383
3384 /* Vector types are also compatible if they have the same number of subparts
3385 and the same form of (scalar) element type. */
3386 if (code == VECTOR_TYPE
3387 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3388 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3389 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3390 return 1;
3391
3392 /* Array types are also compatible if they are constrained and have the same
3393 domain(s), the same component type and the same scalar storage order. */
3394 if (code == ARRAY_TYPE
3395 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3396 || (TYPE_DOMAIN (t1)
3397 && TYPE_DOMAIN (t2)
3398 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3399 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3400 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3401 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3402 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3403 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3404 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3405 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3406 return 1;
3407
3408 return 0;
3409 }
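/* Illustrative sketch (assumption): under the array rule above, two
   distinct constrained array types are deemed compatible as soon as their
   domains, component types and scalar storage order match, e.g.

     tree dom = build_index_type (size_int (9));
     tree a1 = build_array_type (integer_type_node, dom);

   is compatible with any other such 10-element array of integer_type_node
   even when the TYPE_MAIN_VARIANTs differ.  */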
3410
3411 /* Return true if EXPR is a useless type conversion. */
3412
3413 bool
3414 gnat_useless_type_conversion (tree expr)
3415 {
3416 if (CONVERT_EXPR_P (expr)
3417 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3418 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3419 return gnat_types_compatible_p (TREE_TYPE (expr),
3420 TREE_TYPE (TREE_OPERAND (expr, 0)));
3421
3422 return false;
3423 }
3424
3425 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3426
3427 bool
3428 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3429 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3430 {
3431 return TYPE_CI_CO_LIST (t) == cico_list
3432 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3433 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3434 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3435 }
3436 \f
3437 /* EXP is an expression for the size of an object. If this size contains
3438 discriminant references, replace them with the maximum (if MAX_P) or
3439 minimum (if !MAX_P) possible value of the discriminant. */
3440
3441 tree
3442 max_size (tree exp, bool max_p)
3443 {
3444 enum tree_code code = TREE_CODE (exp);
3445 tree type = TREE_TYPE (exp);
3446
3447 switch (TREE_CODE_CLASS (code))
3448 {
3449 case tcc_declaration:
3450 case tcc_constant:
3451 return exp;
3452
3453 case tcc_vl_exp:
3454 if (code == CALL_EXPR)
3455 {
3456 tree t, *argarray;
3457 int n, i;
3458
3459 t = maybe_inline_call_in_expr (exp);
3460 if (t)
3461 return max_size (t, max_p);
3462
3463 n = call_expr_nargs (exp);
3464 gcc_assert (n > 0);
3465 argarray = XALLOCAVEC (tree, n);
3466 for (i = 0; i < n; i++)
3467 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3468 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3469 }
3470 break;
3471
3472 case tcc_reference:
3473 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3474 modify. Otherwise, we treat it like a variable. */
3475 if (CONTAINS_PLACEHOLDER_P (exp))
3476 {
3477 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3478 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3479 return max_size (convert (get_base_type (val_type), val), true);
3480 }
3481
3482 return exp;
3483
3484 case tcc_comparison:
3485 return max_p ? size_one_node : size_zero_node;
3486
3487 case tcc_unary:
3488 if (code == NON_LVALUE_EXPR)
3489 return max_size (TREE_OPERAND (exp, 0), max_p);
3490
3491 return fold_build1 (code, type,
3492 max_size (TREE_OPERAND (exp, 0),
3493 code == NEGATE_EXPR ? !max_p : max_p));
3494
3495 case tcc_binary:
3496 {
3497 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3498 tree rhs = max_size (TREE_OPERAND (exp, 1),
3499 code == MINUS_EXPR ? !max_p : max_p);
3500
3501 /* Special-case wanting the maximum value of a MIN_EXPR.
3502 In that case, if one side overflows, return the other. */
3503 if (max_p && code == MIN_EXPR)
3504 {
3505 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3506 return lhs;
3507
3508 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3509 return rhs;
3510 }
3511
3512 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3513 overflowing and the RHS a variable. */
3514 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3515 && TREE_CODE (lhs) == INTEGER_CST
3516 && TREE_OVERFLOW (lhs)
3517 && TREE_CODE (rhs) != INTEGER_CST)
3518 return lhs;
3519
3520 /* If we are going to subtract a "negative" value in an unsigned type,
3521 do the operation as an addition of the negated value, in order to
3522 avoid creating a spurious overflow below. */
3523 if (code == MINUS_EXPR
3524 && TYPE_UNSIGNED (type)
3525 && TREE_CODE (rhs) == INTEGER_CST
3526 && !TREE_OVERFLOW (rhs)
3527 && tree_int_cst_sign_bit (rhs) != 0)
3528 {
3529 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3530 code = PLUS_EXPR;
3531 }
3532
3533 /* We need to detect overflows so we call size_binop here. */
3534 return size_binop (code, lhs, rhs);
3535 }
3536
3537 case tcc_expression:
3538 switch (TREE_CODE_LENGTH (code))
3539 {
3540 case 1:
3541 if (code == SAVE_EXPR)
3542 return exp;
3543
3544 return fold_build1 (code, type,
3545 max_size (TREE_OPERAND (exp, 0), max_p));
3546
3547 case 2:
3548 if (code == COMPOUND_EXPR)
3549 return max_size (TREE_OPERAND (exp, 1), max_p);
3550
3551 return fold_build2 (code, type,
3552 max_size (TREE_OPERAND (exp, 0), max_p),
3553 max_size (TREE_OPERAND (exp, 1), max_p));
3554
3555 case 3:
3556 if (code == COND_EXPR)
3557 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3558 max_size (TREE_OPERAND (exp, 1), max_p),
3559 max_size (TREE_OPERAND (exp, 2), max_p));
3560
3561 default:
3562 break;
3563 }
3564
3565 /* Other tree classes cannot happen. */
3566 default:
3567 break;
3568 }
3569
3570 gcc_unreachable ();
3571 }
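/* Illustrative sketch (assumption): for a size expression such as

     MIN_EXPR <PLACEHOLDER.D, 100>

   asking for the maximum first replaces the discriminant reference with
   the upper bound of its base type; if that constant then overflows, the
   MIN_EXPR special case above returns the other operand, i.e. 100,
   instead of propagating the overflowed constant.  */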
3572 \f
3573 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3574 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3575 Return a constructor for the template. */
3576
3577 tree
3578 build_template (tree template_type, tree array_type, tree expr)
3579 {
3580 vec<constructor_elt, va_gc> *template_elts = NULL;
3581 tree bound_list = NULL_TREE;
3582 tree field;
3583
3584 while (TREE_CODE (array_type) == RECORD_TYPE
3585 && (TYPE_PADDING_P (array_type)
3586 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3587 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3588
3589 if (TREE_CODE (array_type) == ARRAY_TYPE
3590 || (TREE_CODE (array_type) == INTEGER_TYPE
3591 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3592 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3593
3594 /* First make the list for a CONSTRUCTOR for the template. Go down the
3595 field list of the template instead of the type chain because this
3596 array might be an Ada array of arrays and we can't tell where the
3597 nested arrays stop being the underlying object. */
3598
3599 for (field = TYPE_FIELDS (template_type); field;
3600 (bound_list
3601 ? (bound_list = TREE_CHAIN (bound_list))
3602 : (array_type = TREE_TYPE (array_type))),
3603 field = DECL_CHAIN (DECL_CHAIN (field)))
3604 {
3605 tree bounds, min, max;
3606
3607 /* If we have a bound list, get the bounds from there. Likewise
3608 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3609 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3610 This will give us a maximum range. */
3611 if (bound_list)
3612 bounds = TREE_VALUE (bound_list);
3613 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3614 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3615 else if (expr && TREE_CODE (expr) == PARM_DECL
3616 && DECL_BY_COMPONENT_PTR_P (expr))
3617 bounds = TREE_TYPE (field);
3618 else
3619 gcc_unreachable ();
3620
3621 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3622 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3623
3624 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3625 substitute it from OBJECT. */
3626 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3627 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3628
3629 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3630 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3631 }
3632
3633 return gnat_build_constructor (template_type, template_elts);
3634 }
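/* Illustrative sketch (assumption): for a one-dimensional array with
   bounds 1 .. N, the constructor returned above has the shape

     {LB0 => 1, UB0 => N}

   where LB0/UB0 are the two consecutive fields conventionally created for
   TEMPLATE_TYPE, with any PLACEHOLDER_EXPR in the bounds already
   substituted from EXPR.  */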
3635 \f
3636 /* Return true if TYPE is suitable for the element type of a vector. */
3637
3638 static bool
3639 type_for_vector_element_p (tree type)
3640 {
3641 machine_mode mode;
3642
3643 if (!INTEGRAL_TYPE_P (type)
3644 && !SCALAR_FLOAT_TYPE_P (type)
3645 && !FIXED_POINT_TYPE_P (type))
3646 return false;
3647
3648 mode = TYPE_MODE (type);
3649 if (GET_MODE_CLASS (mode) != MODE_INT
3650 && !SCALAR_FLOAT_MODE_P (mode)
3651 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3652 return false;
3653
3654 return true;
3655 }
3656
3657 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3658 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3659 attribute declaration and want to issue error messages on failure. */
3660
3661 static tree
3662 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3663 {
3664 unsigned HOST_WIDE_INT size_int, inner_size_int;
3665 int nunits;
3666
3667 /* Silently punt on variable sizes. We can't make vector types for them,
3668 we need to ignore them on front-end generated subtypes of unconstrained
3669 base types, and, since this attribute is for binding implementors rather
3670 than end users, we should never get here from legitimate explicit uses. */
3671 if (!tree_fits_uhwi_p (size))
3672 return NULL_TREE;
3673 size_int = tree_to_uhwi (size);
3674
3675 if (!type_for_vector_element_p (inner_type))
3676 {
3677 if (attribute)
3678 error ("invalid element type for attribute %qs",
3679 IDENTIFIER_POINTER (attribute));
3680 return NULL_TREE;
3681 }
3682 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3683
3684 if (size_int % inner_size_int)
3685 {
3686 if (attribute)
3687 error ("vector size not an integral multiple of component size");
3688 return NULL_TREE;
3689 }
3690
3691 if (size_int == 0)
3692 {
3693 if (attribute)
3694 error ("zero vector size");
3695 return NULL_TREE;
3696 }
3697
3698 nunits = size_int / inner_size_int;
3699 if (nunits & (nunits - 1))
3700 {
3701 if (attribute)
3702 error ("number of components of vector not a power of two");
3703 return NULL_TREE;
3704 }
3705
3706 return build_vector_type (inner_type, nunits);
3707 }
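/* Illustrative sketch (assumption): with a 4-byte component type and a
   SIZE of 16 bytes, the checks above pass (16 % 4 == 0, nonzero size,
   4 units which is a power of two) and a 4-unit vector type is built;
   a SIZE of 12 bytes would fail the power-of-two test on its 3 units.  */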
3708
3709 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3710 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3711 processing the attribute and want to issue error messages on failure. */
3712
3713 static tree
3714 build_vector_type_for_array (tree array_type, tree attribute)
3715 {
3716 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3717 TYPE_SIZE_UNIT (array_type),
3718 attribute);
3719 if (!vector_type)
3720 return NULL_TREE;
3721
3722 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3723 return vector_type;
3724 }
3725 \f
3726 /* Build a type to be used to represent an aliased object whose nominal type
3727 is an unconstrained array. This consists of a RECORD_TYPE containing a
3728 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3729 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3730 an arbitrary unconstrained object. Use NAME as the name of the record.
3731 DEBUG_INFO_P is true if we need to write debug information for the type. */
3732
3733 tree
3734 build_unc_object_type (tree template_type, tree object_type, tree name,
3735 bool debug_info_p)
3736 {
3737 tree decl;
3738 tree type = make_node (RECORD_TYPE);
3739 tree template_field
3740 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3741 NULL_TREE, NULL_TREE, 0, 1);
3742 tree array_field
3743 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3744 NULL_TREE, NULL_TREE, 0, 1);
3745
3746 TYPE_NAME (type) = name;
3747 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3748 DECL_CHAIN (template_field) = array_field;
3749 finish_record_type (type, template_field, 0, true);
3750
3751 /* Declare it now since it will never be declared otherwise. This is
3752 necessary to ensure that its subtrees are properly marked. */
3753 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3754
3755 /* template_type will not be used anywhere other than here, so to keep
3756 the debug info clean and to avoid scoping issues, make decl its
3757 context. */
3758 gnat_set_type_context (template_type, decl);
3759
3760 return type;
3761 }
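/* Illustrative sketch (assumption): the record built above is essentially

     struct { bounds_type BOUNDS; array_type ARRAY; };

   so a thin pointer that has been shifted to the ARRAY field can recover
   the bounds at a fixed negative offset, as done by convert_to_fat_pointer
   below.  */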
3762
3763 /* Same, taking a thin or fat pointer type instead of a template type. */
3764
3765 tree
3766 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3767 tree name, bool debug_info_p)
3768 {
3769 tree template_type;
3770
3771 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3772
3773 template_type
3774 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3775 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3776 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3777
3778 return
3779 build_unc_object_type (template_type, object_type, name, debug_info_p);
3780 }
3781 \f
3782 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3783 In the normal case this is just two adjustments, but we have more to
3784 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3785
3786 void
3787 update_pointer_to (tree old_type, tree new_type)
3788 {
3789 tree ptr = TYPE_POINTER_TO (old_type);
3790 tree ref = TYPE_REFERENCE_TO (old_type);
3791 tree t;
3792
3793 /* If this is the main variant, process all the other variants first. */
3794 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3795 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3796 update_pointer_to (t, new_type);
3797
3798 /* If no pointers and no references, we are done. */
3799 if (!ptr && !ref)
3800 return;
3801
3802 /* Merge the old type qualifiers in the new type.
3803
3804 Each old variant has qualifiers for specific reasons, and the new
3805 designated type as well. Each set of qualifiers represents useful
3806 information grabbed at some point, and merging the two simply unifies
3807 these inputs into the final type description.
3808
3809 Consider for instance a volatile type frozen after an access to constant
3810 type designating it; after the designated type's freeze, we get here with
3811 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3812 when the access type was processed. We will make a volatile and readonly
3813 designated type, because that's what it really is.
3814
3815 We might also get here for a non-dummy OLD_TYPE variant with different
3816 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3817 to private record type elaboration (see the comments around the call to
3818 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3819 the qualifiers in those cases too, to avoid accidentally discarding the
3820 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3821 new_type
3822 = build_qualified_type (new_type,
3823 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3824
3825 /* If old type and new type are identical, there is nothing to do. */
3826 if (old_type == new_type)
3827 return;
3828
3829 /* Otherwise, first handle the simple case. */
3830 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3831 {
3832 tree new_ptr, new_ref;
3833
3834 /* If pointer or reference already points to new type, nothing to do.
3835 This can happen as update_pointer_to can be invoked multiple times
3836 on the same couple of types because of the type variants. */
3837 if ((ptr && TREE_TYPE (ptr) == new_type)
3838 || (ref && TREE_TYPE (ref) == new_type))
3839 return;
3840
3841 /* Chain PTR and its variants at the end. */
3842 new_ptr = TYPE_POINTER_TO (new_type);
3843 if (new_ptr)
3844 {
3845 while (TYPE_NEXT_PTR_TO (new_ptr))
3846 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3847 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3848 }
3849 else
3850 TYPE_POINTER_TO (new_type) = ptr;
3851
3852 /* Now adjust them. */
3853 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3854 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3855 {
3856 TREE_TYPE (t) = new_type;
3857 if (TYPE_NULL_BOUNDS (t))
3858 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3859 }
3860
3861 /* Chain REF and its variants at the end. */
3862 new_ref = TYPE_REFERENCE_TO (new_type);
3863 if (new_ref)
3864 {
3865 while (TYPE_NEXT_REF_TO (new_ref))
3866 new_ref = TYPE_NEXT_REF_TO (new_ref);
3867 TYPE_NEXT_REF_TO (new_ref) = ref;
3868 }
3869 else
3870 TYPE_REFERENCE_TO (new_type) = ref;
3871
3872 /* Now adjust them. */
3873 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3874 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3875 TREE_TYPE (t) = new_type;
3876
3877 TYPE_POINTER_TO (old_type) = NULL_TREE;
3878 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3879 }
3880
3881 /* Now deal with the unconstrained array case. In this case the pointer
3882 is actually a record where both fields are pointers to dummy nodes.
3883 Turn them into pointers to the correct types using update_pointer_to.
3884 Likewise for the pointer to the object record (thin pointer). */
3885 else
3886 {
3887 tree new_ptr = TYPE_POINTER_TO (new_type);
3888
3889 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3890
3891 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
3892 since update_pointer_to can be invoked multiple times on the same
3893 couple of types because of the type variants. */
3894 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3895 return;
3896
3897 update_pointer_to
3898 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3899 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
3900
3901 update_pointer_to
3902 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3903 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
3904
3905 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3906 TYPE_OBJECT_RECORD_TYPE (new_type));
3907
3908 TYPE_POINTER_TO (old_type) = NULL_TREE;
3909 }
3910 }
3911 \f
3912 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3913 unconstrained one. This involves making or finding a template. */
3914
3915 static tree
3916 convert_to_fat_pointer (tree type, tree expr)
3917 {
3918 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
3919 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3920 tree etype = TREE_TYPE (expr);
3921 tree template_addr;
3922 vec<constructor_elt, va_gc> *v;
3923 vec_alloc (v, 2);
3924
3925 /* If EXPR is null, make a fat pointer that contains a null pointer to the
3926 array (compare_fat_pointers ensures that this is the full discriminant)
3927 and a valid pointer to the bounds. This latter property is necessary
3928 since the compiler can hoist the load of the bounds done through it. */
3929 if (integer_zerop (expr))
3930 {
3931 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3932 tree null_bounds, t;
3933
3934 if (TYPE_NULL_BOUNDS (ptr_template_type))
3935 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
3936 else
3937 {
3938 /* The template type can still be dummy at this point so we build an
3939 empty constructor. The middle-end will fill it in with zeros. */
3940 t = build_constructor (template_type, NULL);
3941 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
3942 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
3943 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
3944 }
3945
3946 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3947 fold_convert (p_array_type, null_pointer_node));
3948 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
3949 t = build_constructor (type, v);
3950 /* Do not set TREE_CONSTANT so as to force T to static memory. */
3951 TREE_CONSTANT (t) = 0;
3952 TREE_STATIC (t) = 1;
3953
3954 return t;
3955 }
3956
3957 /* If EXPR is a thin pointer, make template and data from the record. */
3958 if (TYPE_IS_THIN_POINTER_P (etype))
3959 {
3960 tree field = TYPE_FIELDS (TREE_TYPE (etype));
3961
3962 expr = gnat_protect_expr (expr);
3963
3964 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
3965 the thin pointer value has been shifted so we shift it back to get
3966 the template address. */
3967 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
3968 {
3969 template_addr
3970 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
3971 fold_build1 (NEGATE_EXPR, sizetype,
3972 byte_position
3973 (DECL_CHAIN (field))));
3974 template_addr
3975 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
3976 template_addr);
3977 }
3978
3979 /* Otherwise we explicitly take the address of the fields. */
3980 else
3981 {
3982 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
3983 template_addr
3984 = build_unary_op (ADDR_EXPR, NULL_TREE,
3985 build_component_ref (expr, field, false));
3986 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3987 build_component_ref (expr, DECL_CHAIN (field),
3988 false));
3989 }
3990 }
3991
3992 /* Otherwise, build the constructor for the template. */
3993 else
3994 template_addr
3995 = build_unary_op (ADDR_EXPR, NULL_TREE,
3996 build_template (template_type, TREE_TYPE (etype),
3997 expr));
3998
3999 /* The final result is a constructor for the fat pointer.
4000
4001 If EXPR is an argument of a foreign convention subprogram, the type it
4002 points to is directly the component type. In this case, the expression
4003 type may not match the corresponding FIELD_DECL type at this point, so we
4004 call "convert" here to fix that up if necessary. This type consistency is
4005 required, for instance because it ensures that possible later folding of
4006 COMPONENT_REFs against this constructor always yields something of the
4007 same type as the initial reference.
4008
4009 Note that the call to "build_template" above is still fine because it
4010 will only refer to the provided TEMPLATE_TYPE in this case. */
4011 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4012 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4013 return gnat_build_constructor (type, v);
4014 }
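/* Illustrative sketch (assumption): the fat pointer built above is a
   two-field constructor of the form

     {P_ARRAY => (p_array_type) &data, P_BOUNDS => template_addr}

   and, for a null EXPR, P_ARRAY is null while P_BOUNDS still points to a
   static all-zero template, per the comment at the top of the function.  */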
4015 \f
4016 /* Create an expression whose value is that of EXPR,
4017 converted to type TYPE. The TREE_TYPE of the value
4018 is always TYPE. This function implements all reasonable
4019 conversions; callers should filter out those that are
4020 not permitted by the language being compiled. */
4021
4022 tree
4023 convert (tree type, tree expr)
4024 {
4025 tree etype = TREE_TYPE (expr);
4026 enum tree_code ecode = TREE_CODE (etype);
4027 enum tree_code code = TREE_CODE (type);
4028
4029 /* If the expression is already of the right type, we are done. */
4030 if (etype == type)
4031 return expr;
4032
4033 /* If both input and output have padding and are of variable size, do this
4034 as an unchecked conversion. Likewise if one is a mere variant of the
4035 other, so we avoid a pointless unpad/repad sequence. */
4036 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4037 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4038 && (!TREE_CONSTANT (TYPE_SIZE (type))
4039 || !TREE_CONSTANT (TYPE_SIZE (etype))
4040 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4041 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4042 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4043 ;
4044
4045 /* If the output type has padding, convert to the inner type and make a
4046 constructor to build the record, unless a variable size is involved. */
4047 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4048 {
4049 vec<constructor_elt, va_gc> *v;
4050
4051 /* If we previously converted from another type and our type is
4052 of variable size, remove the conversion to avoid the need for
4053 variable-sized temporaries. Likewise for a conversion between
4054 original and packable version. */
4055 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4056 && (!TREE_CONSTANT (TYPE_SIZE (type))
4057 || (ecode == RECORD_TYPE
4058 && TYPE_NAME (etype)
4059 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4060 expr = TREE_OPERAND (expr, 0);
4061
4062 /* If we are just removing the padding from expr, convert the original
4063 object if we have variable size in order to avoid the need for some
4064 variable-sized temporaries. Likewise if the padding is a variant
4065 of the other, so we avoid a pointless unpad/repad sequence. */
4066 if (TREE_CODE (expr) == COMPONENT_REF
4067 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4068 && (!TREE_CONSTANT (TYPE_SIZE (type))
4069 || TYPE_MAIN_VARIANT (type)
4070 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4071 || (ecode == RECORD_TYPE
4072 && TYPE_NAME (etype)
4073 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4074 return convert (type, TREE_OPERAND (expr, 0));
4075
4076 /* If the inner type is of self-referential size and the expression type
4077 is a record, do this as an unchecked conversion. But first pad the
4078 expression if possible to have the same size on both sides. */
4079 if (ecode == RECORD_TYPE
4080 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4081 {
4082 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4083 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4084 false, false, false, true),
4085 expr);
4086 return unchecked_convert (type, expr, false);
4087 }
4088
4089 /* If we are converting between array types with variable size, do the
4090 final conversion as an unchecked conversion, again to avoid the need
4091 for some variable-sized temporaries. If valid, this conversion is
4092 very likely purely technical and without real effects. */
4093 if (ecode == ARRAY_TYPE
4094 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4095 && !TREE_CONSTANT (TYPE_SIZE (etype))
4096 && !TREE_CONSTANT (TYPE_SIZE (type)))
4097 return unchecked_convert (type,
4098 convert (TREE_TYPE (TYPE_FIELDS (type)),
4099 expr),
4100 false);
4101
4102 vec_alloc (v, 1);
4103 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4104 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4105 return gnat_build_constructor (type, v);
4106 }
4107
4108 /* If the input type has padding, remove it and convert to the output type.
4109 The ordering of the conditions is arranged to ensure that the output type
4110 is not a padding type here, as it is not clear whether the conversion
4111 would always be correct if this were to happen. */
4112 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4113 {
4114 tree unpadded;
4115
4116 /* If we have just converted to this padded type, just get the
4117 inner expression. */
4118 if (TREE_CODE (expr) == CONSTRUCTOR)
4119 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4120
4121 /* Otherwise, build an explicit component reference. */
4122 else
4123 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4124
4125 return convert (type, unpadded);
4126 }
4127
4128 /* If the input is a biased type, adjust first. */
4129 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4130 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4131 fold_convert (TREE_TYPE (etype), expr),
4132 fold_convert (TREE_TYPE (etype),
4133 TYPE_MIN_VALUE (etype))));
4134
4135 /* If the input is a justified modular type, we need to extract the actual
4136 object before converting it to any other type with the exceptions of an
4137 unconstrained array or of a mere type variant. It is useful to avoid the
4138 extraction and conversion in the type variant case because it could end
4139 up replacing a VAR_DECL expr by a constructor and we might be about to
4140 take the address of the result. */
4141 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4142 && code != UNCONSTRAINED_ARRAY_TYPE
4143 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4144 return
4145 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4146
4147 /* If converting to a type that contains a template, convert to the data
4148 type and then build the template. */
4149 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4150 {
4151 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4152 vec<constructor_elt, va_gc> *v;
4153 vec_alloc (v, 2);
4154
4155 /* If the source already has a template, get a reference to the
4156 associated array only, as we are going to rebuild a template
4157 for the target type anyway. */
4158 expr = maybe_unconstrained_array (expr);
4159
4160 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4161 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4162 obj_type, NULL_TREE));
4163 if (expr)
4164 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4165 convert (obj_type, expr));
4166 return gnat_build_constructor (type, v);
4167 }
4168
4169 /* There are some cases of expressions that we process specially. */
4170 switch (TREE_CODE (expr))
4171 {
4172 case ERROR_MARK:
4173 return expr;
4174
4175 case NULL_EXPR:
4176 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4177 conversion in gnat_expand_expr. NULL_EXPR does not represent
4178 an actual value, so no conversion is needed. */
4179 expr = copy_node (expr);
4180 TREE_TYPE (expr) = type;
4181 return expr;
4182
4183 case STRING_CST:
4184 /* If we are converting a STRING_CST to another constrained array type,
4185 just make a new one in the proper type. */
4186 if (code == ecode && AGGREGATE_TYPE_P (etype)
4187 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4188 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4189 {
4190 expr = copy_node (expr);
4191 TREE_TYPE (expr) = type;
4192 return expr;
4193 }
4194 break;
4195
4196 case VECTOR_CST:
4197 /* If we are converting a VECTOR_CST to a mere type variant, just make
4198 a new one in the proper type. */
4199 if (code == ecode && gnat_types_compatible_p (type, etype))
4200 {
4201 expr = copy_node (expr);
4202 TREE_TYPE (expr) = type;
4203 return expr;
4204 }
4205
/* ... fall through ... */

4206 case CONSTRUCTOR:
4207 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4208 another padding type around the same type, just make a new one in
4209 the proper type. */
4210 if (code == ecode
4211 && (gnat_types_compatible_p (type, etype)
4212 || (code == RECORD_TYPE
4213 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4214 && TREE_TYPE (TYPE_FIELDS (type))
4215 == TREE_TYPE (TYPE_FIELDS (etype)))))
4216 {
4217 expr = copy_node (expr);
4218 TREE_TYPE (expr) = type;
4219 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4220 return expr;
4221 }
4222
4223 /* Likewise for a conversion between original and packable version, or
4224 conversion between types of the same size and with the same list of
4225 fields, but we have to work harder to preserve type consistency. */
4226 if (code == ecode
4227 && code == RECORD_TYPE
4228 && (TYPE_NAME (type) == TYPE_NAME (etype)
4229 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4230 {
4232 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4233 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4234 vec<constructor_elt, va_gc> *v;
4235 vec_alloc (v, len);
4236 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4237 unsigned HOST_WIDE_INT idx;
4238 tree index, value;
4239
4240 /* Whether we need to clear TREE_CONSTANT et al. on the output
4241 constructor when we convert in place. */
4242 bool clear_constant = false;
4243
4244 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4245 {
4246 /* Skip the missing fields in the CONSTRUCTOR. */
4247 while (efield && field && !SAME_FIELD_P (efield, index))
4248 {
4249 efield = DECL_CHAIN (efield);
4250 field = DECL_CHAIN (field);
4251 }
4252 /* The field must be the same. */
4253 if (!(efield && field && SAME_FIELD_P (efield, field)))
4254 break;
4255 constructor_elt elt
4256 = {field, convert (TREE_TYPE (field), value)};
4257 v->quick_push (elt);
4258
4259 /* If packing has made this field a bitfield and the input
4260 value couldn't be emitted statically any more, we need to
4261 clear TREE_CONSTANT on our output. */
4262 if (!clear_constant
4263 && TREE_CONSTANT (expr)
4264 && !CONSTRUCTOR_BITFIELD_P (efield)
4265 && CONSTRUCTOR_BITFIELD_P (field)
4266 && !initializer_constant_valid_for_bitfield_p (value))
4267 clear_constant = true;
4268
4269 efield = DECL_CHAIN (efield);
4270 field = DECL_CHAIN (field);
4271 }
4272
4273 /* If we have been able to match and convert all the input fields
4274 to their output type, convert in place now. We'll fall back to a
4275 view conversion downstream otherwise. */
4276 if (idx == len)
4277 {
4278 expr = copy_node (expr);
4279 TREE_TYPE (expr) = type;
4280 CONSTRUCTOR_ELTS (expr) = v;
4281 if (clear_constant)
4282 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4283 return expr;
4284 }
4285 }
4286
4287 /* Likewise for a conversion between array type and vector type with a
4288 compatible representative array. */
4289 else if (code == VECTOR_TYPE
4290 && ecode == ARRAY_TYPE
4291 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4292 etype))
4293 {
4294 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4295 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4296 vec<constructor_elt, va_gc> *v;
4297 unsigned HOST_WIDE_INT ix;
4298 tree value;
4299
4300 /* Build a VECTOR_CST from a *constant* array constructor. */
4301 if (TREE_CONSTANT (expr))
4302 {
4303 bool constant_p = true;
4304
4305 /* Iterate through elements and check if all constructor
4306 elements are *_CSTs. */
4307 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4308 if (!CONSTANT_CLASS_P (value))
4309 {
4310 constant_p = false;
4311 break;
4312 }
4313
4314 if (constant_p)
4315 return build_vector_from_ctor (type,
4316 CONSTRUCTOR_ELTS (expr));
4317 }
4318
4319 /* Otherwise, build a regular vector constructor. */
4320 vec_alloc (v, len);
4321 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4322 {
4323 constructor_elt elt = {NULL_TREE, value};
4324 v->quick_push (elt);
4325 }
4326 expr = copy_node (expr);
4327 TREE_TYPE (expr) = type;
4328 CONSTRUCTOR_ELTS (expr) = v;
4329 return expr;
4330 }
4331 break;
4332
4333 case UNCONSTRAINED_ARRAY_REF:
4334 /* First retrieve the underlying array. */
4335 expr = maybe_unconstrained_array (expr);
4336 etype = TREE_TYPE (expr);
4337 ecode = TREE_CODE (etype);
4338 break;
4339
4340 case VIEW_CONVERT_EXPR:
4341 {
4342 /* GCC 4.x is very sensitive to type consistency overall, and view
4343 conversions thus are very frequent. Even though just "convert"ing
4344 the inner operand to the output type is fine in most cases, it
4345 might expose unexpected input/output type mismatches in special
4346 circumstances so we avoid such recursive calls when we can. */
4347 tree op0 = TREE_OPERAND (expr, 0);
4348
4349 /* If we are converting back to the original type, we can just
4350 lift the input conversion. This is a common occurrence with
4351 switches back-and-forth amongst type variants. */
4352 if (type == TREE_TYPE (op0))
4353 return op0;
4354
4355 /* Otherwise, if we're converting between two aggregate or vector
4356 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4357 target type in place or to just convert the inner expression. */
4358 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4359 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4360 {
4361 /* If we are converting between mere variants, we can just
4362 substitute the VIEW_CONVERT_EXPR in place. */
4363 if (gnat_types_compatible_p (type, etype))
4364 return build1 (VIEW_CONVERT_EXPR, type, op0);
4365
4366 /* Otherwise, we may just bypass the input view conversion unless
4367 one of the types is a fat pointer, which is handled by
4368 specialized code below which relies on exact type matching. */
4369 else if (!TYPE_IS_FAT_POINTER_P (type)
4370 && !TYPE_IS_FAT_POINTER_P (etype))
4371 return convert (type, op0);
4372 }
4373
4374 break;
4375 }
4376
4377 default:
4378 break;
4379 }
4380
4381 /* Check for converting to a pointer to an unconstrained array. */
4382 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4383 return convert_to_fat_pointer (type, expr);
4384
4385 /* If we are converting between two aggregate or vector types that are mere
4386 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4387 to a vector type from its representative array type. */
4388 else if ((code == ecode
4389 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4390 && gnat_types_compatible_p (type, etype))
4391 || (code == VECTOR_TYPE
4392 && ecode == ARRAY_TYPE
4393 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4394 etype)))
4395 return build1 (VIEW_CONVERT_EXPR, type, expr);
4396
4397 /* If we are converting between tagged types, try to upcast properly. */
4398 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4399 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4400 {
4401 tree child_etype = etype;
4402 do {
4403 tree field = TYPE_FIELDS (child_etype);
4404 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4405 return build_component_ref (expr, field, false);
4406 child_etype = TREE_TYPE (field);
4407 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4408 }
4409
4410 /* If we are converting from a smaller form of record type back to it, just
4411 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4412 size on both sides. */
4413 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4414 && smaller_form_type_p (etype, type))
4415 {
4416 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4417 false, false, false, true),
4418 expr);
4419 return build1 (VIEW_CONVERT_EXPR, type, expr);
4420 }
4421
4422 /* In all other cases of related types, make a NOP_EXPR. */
4423 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4424 return fold_convert (type, expr);
4425
4426 switch (code)
4427 {
4428 case VOID_TYPE:
4429 return fold_build1 (CONVERT_EXPR, type, expr);
4430
4431 case INTEGER_TYPE:
4432 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4433 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4434 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4435 return unchecked_convert (type, expr, false);
4436 else if (TYPE_BIASED_REPRESENTATION_P (type))
4437 return fold_convert (type,
4438 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4439 convert (TREE_TYPE (type), expr),
4440 convert (TREE_TYPE (type),
4441 TYPE_MIN_VALUE (type))));
4442
4443 /* ... fall through ... */
4444
4445 case ENUMERAL_TYPE:
4446 case BOOLEAN_TYPE:
4447 /* If we are converting an additive expression to an integer type
4448 with lower precision, be wary of the optimization that can be
4449 applied by convert_to_integer. There are two problematic cases:
4450 - if the first operand was originally of a biased type,
4451 because we could be recursively called to convert it
4452 to an intermediate type and thus rematerialize the
4453 additive operator endlessly,
4454 - if the expression contains a placeholder, because an
4455 intermediate conversion that changes the sign could
4456 be inserted and thus introduce an artificial overflow
4457 at compile time when the placeholder is substituted. */
4458 if (code == INTEGER_TYPE
4459 && ecode == INTEGER_TYPE
4460 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4461 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4462 {
4463 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4464
4465 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4466 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4467 || CONTAINS_PLACEHOLDER_P (expr))
4468 return build1 (NOP_EXPR, type, expr);
4469 }
4470
4471 return fold (convert_to_integer (type, expr));
4472
4473 case POINTER_TYPE:
4474 case REFERENCE_TYPE:
4475 /* If converting between two thin pointers, adjust if needed to account
4476 for differing offsets from the base pointer, depending on whether
4477 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4478 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4479 {
4480 tree etype_pos
4481 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4482 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4483 : size_zero_node;
4484 tree type_pos
4485 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4486 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4487 : size_zero_node;
4488 tree byte_diff = size_diffop (type_pos, etype_pos);
4489
4490 expr = build1 (NOP_EXPR, type, expr);
4491 if (integer_zerop (byte_diff))
4492 return expr;
4493
4494 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4495 fold_convert (sizetype, byte_diff));
4496 }
4497
4498 /* If converting fat pointer to normal or thin pointer, get the pointer
4499 to the array and then convert it. */
4500 if (TYPE_IS_FAT_POINTER_P (etype))
4501 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4502
4503 return fold (convert_to_pointer (type, expr));
4504
4505 case REAL_TYPE:
4506 return fold (convert_to_real (type, expr));
4507
4508 case RECORD_TYPE:
4509 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4510 {
4511 vec<constructor_elt, va_gc> *v;
4512 vec_alloc (v, 1);
4513
4514 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4515 convert (TREE_TYPE (TYPE_FIELDS (type)),
4516 expr));
4517 return gnat_build_constructor (type, v);
4518 }
4519
4520 /* ... fall through ... */
4521
4522 case ARRAY_TYPE:
4523 /* In these cases, assume the front-end has validated the conversion.
4524 If the conversion is valid, it will be a bit-wise conversion, so
4525 it can be viewed as an unchecked conversion. */
4526 return unchecked_convert (type, expr, false);
4527
4528 case UNION_TYPE:
4529 /* This is either a conversion between a tagged type and some
4530 subtype, which we have to mark as a UNION_TYPE because of
4531 overlapping fields, or a conversion of an Unchecked_Union. */
4532 return unchecked_convert (type, expr, false);
4533
4534 case UNCONSTRAINED_ARRAY_TYPE:
4535 /* If the input is a VECTOR_TYPE, convert to the representative
4536 array type first. */
4537 if (ecode == VECTOR_TYPE)
4538 {
4539 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4540 etype = TREE_TYPE (expr);
4541 ecode = TREE_CODE (etype);
4542 }
4543
4544 /* If EXPR is a constrained array, take its address, convert it to a
4545 fat pointer, and then dereference it. Likewise if EXPR is a
4546 record containing both a template and a constrained array.
4547 Note that a record representing a justified modular type
4548 always represents a packed constrained array. */
4549 if (ecode == ARRAY_TYPE
4550 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4551 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4552 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4553 return
4554 build_unary_op
4555 (INDIRECT_REF, NULL_TREE,
4556 convert_to_fat_pointer (TREE_TYPE (type),
4557 build_unary_op (ADDR_EXPR,
4558 NULL_TREE, expr)));
4559
4560 /* Do something very similar for converting one unconstrained
4561 array to another. */
4562 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4563 return
4564 build_unary_op (INDIRECT_REF, NULL_TREE,
4565 convert (TREE_TYPE (type),
4566 build_unary_op (ADDR_EXPR,
4567 NULL_TREE, expr)));
4568 else
4569 gcc_unreachable ();
4570
4571 case COMPLEX_TYPE:
4572 return fold (convert_to_complex (type, expr));
4573
4574 default:
4575 gcc_unreachable ();
4576 }
4577 }
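/* Illustrative sketch (assumption): converting a justified modular value
   to an integer type exercises the component extraction path above, e.g.

     tree v = convert (gnat_type_for_size (8, 1), jm_expr);

   first rewrites JM_EXPR as a COMPONENT_REF on the single field of the
   justified modular record, then dispatches to convert_to_integer.  */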
4578
4579 /* Create an expression whose value is that of EXPR converted to the common
4580 index type, which is sizetype. EXPR is supposed to be in the base type
4581 of the GNAT index type. Calling it is equivalent to doing
4582
4583 convert (sizetype, expr)
4584
4585 but we try to distribute the type conversion with the knowledge that EXPR
4586 cannot overflow in its type. This is a best-effort approach and we fall
4587 back to the above expression as soon as difficulties are encountered.
4588
4589 This is necessary to overcome issues that arise when the GNAT base index
4590 type and the GCC common index type (sizetype) don't have the same size,
4591 which is quite frequent on 64-bit architectures. In this case, and if
4592 the GNAT base index type is signed but the iteration type of the loop has
4593 been forced to unsigned, the loop scalar evolution engine cannot compute
4594 a simple evolution for the general induction variables associated with the
4595 array indices, because it will preserve the wrap-around semantics in the
4596 unsigned type of their "inner" part. As a result, many loop optimizations
4597 are blocked.
4598
4599 The solution is to use a special (basic) induction variable that is at
4600 least as large as sizetype, and to express the aforementioned general
4601 induction variables in terms of this induction variable, eliminating
4602 the problematic intermediate truncation to the GNAT base index type.
4603 This is possible as long as the original expression doesn't overflow
4604 and if the middle-end hasn't introduced artificial overflows in the
4605 course of the various simplification it can make to the expression. */
4606
4607 tree
4608 convert_to_index_type (tree expr)
4609 {
4610 enum tree_code code = TREE_CODE (expr);
4611 tree type = TREE_TYPE (expr);
4612
4613 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4614 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4615 if (TYPE_UNSIGNED (type) || !optimize)
4616 return convert (sizetype, expr);
4617
4618 switch (code)
4619 {
4620 case VAR_DECL:
4621 /* The main effect of the function: replace a loop parameter with its
4622 associated special induction variable. */
4623 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4624 expr = DECL_INDUCTION_VAR (expr);
4625 break;
4626
4627 CASE_CONVERT:
4628 {
4629 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4630 /* Bail out as soon as we suspect some sort of type frobbing. */
4631 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4632 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4633 break;
4634 }
4635
4636 /* ... fall through ... */
4637
4638 case NON_LVALUE_EXPR:
4639 return fold_build1 (code, sizetype,
4640 convert_to_index_type (TREE_OPERAND (expr, 0)));
4641
4642 case PLUS_EXPR:
4643 case MINUS_EXPR:
4644 case MULT_EXPR:
4645 return fold_build2 (code, sizetype,
4646 convert_to_index_type (TREE_OPERAND (expr, 0)),
4647 convert_to_index_type (TREE_OPERAND (expr, 1)));
4648
4649 case COMPOUND_EXPR:
4650 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4651 convert_to_index_type (TREE_OPERAND (expr, 1)));
4652
4653 case COND_EXPR:
4654 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4655 convert_to_index_type (TREE_OPERAND (expr, 1)),
4656 convert_to_index_type (TREE_OPERAND (expr, 2)));
4657
4658 default:
4659 break;
4660 }
4661
4662 return convert (sizetype, expr);
4663 }
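
/* A hypothetical illustration of the distribution performed above: assume a
   loop parameter I of a signed 32-bit GNAT index type on a 64-bit target,
   with an associated special induction variable I' of sizetype. Then

     convert_to_index_type (I + 1)  -->  (sizetype) I' + 1

   rather than (sizetype) (I + 1), so the addition is carried out directly
   in sizetype and no truncation to the 32-bit index type remains to block
   the scalar evolution engine. */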
4664 \f
4665 /* Remove all conversions that are done in EXP. This includes converting
4666 from a padded type or to a justified modular type. If TRUE_ADDRESS
4667 is true, always return the address of the containing object even if
4668 the address is not bit-aligned. */
4669
4670 tree
4671 remove_conversions (tree exp, bool true_address)
4672 {
4673 switch (TREE_CODE (exp))
4674 {
4675 case CONSTRUCTOR:
4676 if (true_address
4677 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4678 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4679 return
4680 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4681 break;
4682
4683 case COMPONENT_REF:
4684 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4685 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4686 break;
4687
4688 CASE_CONVERT:
4689 case VIEW_CONVERT_EXPR:
4690 case NON_LVALUE_EXPR:
4691 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4692
4693 default:
4694 break;
4695 }
4696
4697 return exp;
4698 }
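
/* For instance, assuming an object OBJ of a padded record type whose single
   field F holds the actual data, remove_conversions applied to a chain like

     (T) OBJ.F

   peels both the conversion and the padding COMPONENT_REF and returns OBJ
   itself; T, OBJ and F are purely illustrative names. */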
4699 \f
4700 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4701 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4702 likewise return an expression pointing to the underlying array. */
4703
4704 tree
4705 maybe_unconstrained_array (tree exp)
4706 {
4707 enum tree_code code = TREE_CODE (exp);
4708 tree type = TREE_TYPE (exp);
4709
4710 switch (TREE_CODE (type))
4711 {
4712 case UNCONSTRAINED_ARRAY_TYPE:
4713 if (code == UNCONSTRAINED_ARRAY_REF)
4714 {
4715 const bool read_only = TREE_READONLY (exp);
4716 const bool no_trap = TREE_THIS_NOTRAP (exp);
4717
4718 exp = TREE_OPERAND (exp, 0);
4719 type = TREE_TYPE (exp);
4720
4721 if (TREE_CODE (exp) == COND_EXPR)
4722 {
4723 tree op1
4724 = build_unary_op (INDIRECT_REF, NULL_TREE,
4725 build_component_ref (TREE_OPERAND (exp, 1),
4726 TYPE_FIELDS (type),
4727 false));
4728 tree op2
4729 = build_unary_op (INDIRECT_REF, NULL_TREE,
4730 build_component_ref (TREE_OPERAND (exp, 2),
4731 TYPE_FIELDS (type),
4732 false));
4733
4734 exp = build3 (COND_EXPR,
4735 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4736 TREE_OPERAND (exp, 0), op1, op2);
4737 }
4738 else
4739 {
4740 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4741 build_component_ref (exp,
4742 TYPE_FIELDS (type),
4743 false));
4744 TREE_READONLY (exp) = read_only;
4745 TREE_THIS_NOTRAP (exp) = no_trap;
4746 }
4747 }
4748
4749 else if (code == NULL_EXPR)
4750 exp = build1 (NULL_EXPR,
4751 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4752 TREE_OPERAND (exp, 0));
4753 break;
4754
4755 case RECORD_TYPE:
4756 /* If this is a padded type and it contains a template, convert to the
4757 unpadded type first. */
4758 if (TYPE_PADDING_P (type)
4759 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4760 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4761 {
4762 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4763 code = TREE_CODE (exp);
4764 type = TREE_TYPE (exp);
4765 }
4766
4767 if (TYPE_CONTAINS_TEMPLATE_P (type))
4768 {
4769 /* If the array initializer is a box, return NULL_TREE. */
4770 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
4771 return NULL_TREE;
4772
4773 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
4774 false);
4775 type = TREE_TYPE (exp);
4776
4777 /* If the array type is padded, convert to the unpadded type. */
4778 if (TYPE_IS_PADDING_P (type))
4779 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4780 }
4781 break;
4782
4783 default:
4784 break;
4785 }
4786
4787 return exp;
4788 }
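
/* A sketch of the UNCONSTRAINED_ARRAY_REF handling above, under the usual
   fat pointer layout where the first field (TYPE_FIELDS) points to the
   actual array: an expression FAT.all is rebuilt as

     *FAT.P_ARRAY

   i.e. an INDIRECT_REF of the COMPONENT_REF fetching that first field, so
   that the underlying constrained array is exposed to the rest of the
   compiler; P_ARRAY here merely names the first field for the example. */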
4789 \f
4790 /* Return true if EXPR is an expression that can be folded as an operand
4791 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4792
4793 static bool
4794 can_fold_for_view_convert_p (tree expr)
4795 {
4796 tree t1, t2;
4797
4798 /* The folder will fold NOP_EXPRs between integral types with the same
4799 precision (in the middle-end's sense). We cannot allow it if the
4800 types don't have the same precision in the Ada sense as well. */
4801 if (TREE_CODE (expr) != NOP_EXPR)
4802 return true;
4803
4804 t1 = TREE_TYPE (expr);
4805 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4806
4807 /* Defer to the folder for non-integral conversions. */
4808 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4809 return true;
4810
4811 /* Only fold conversions that preserve both precisions. */
4812 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4813 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4814 return true;
4815
4816 return false;
4817 }
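
/* For instance, two integer types may both have a 32-bit precision in the
   middle-end's sense while one of them has an RM size of only 31 bits in
   the Ada sense; folding a NOP_EXPR between them underneath a
   VIEW_CONVERT_EXPR could then change which bits the view conversion sees,
   so the predicate above refuses it. */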
4818
4819 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4820 If NOTRUNC_P is true, truncation operations should be suppressed.
4821
4822 Special care is required with (source or target) integral types whose
4823 precision is not equal to their size, to make sure we fetch or assign
4824 the value bits whose location might depend on the endianness, e.g.
4825
4826 Rmsize : constant := 8;
4827 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4828
4829 type Bit_Array is array (1 .. Rmsize) of Boolean;
4830 pragma Pack (Bit_Array);
4831
4832 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4833
4834 Value : Int := 2#1000_0001#;
4835 Vbits : Bit_Array := To_Bit_Array (Value);
4836
4837 we expect the 8 bits at Vbits'Address to always contain Value, while
4838 their original location depends on the endianness, at Value'Address
4839 on a little-endian architecture but not on a big-endian one. */
4840
4841 tree
4842 unchecked_convert (tree type, tree expr, bool notrunc_p)
4843 {
4844 tree etype = TREE_TYPE (expr);
4845 enum tree_code ecode = TREE_CODE (etype);
4846 enum tree_code code = TREE_CODE (type);
4847 tree tem;
4848 int c;
4849
4850 /* If the expression is already of the right type, we are done. */
4851 if (etype == type)
4852 return expr;
4853
4854 /* If both types are integral, just do a normal conversion.
4855 Likewise for a conversion to an unconstrained array. */
4856 if (((INTEGRAL_TYPE_P (type)
4857 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
4858 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4859 && (INTEGRAL_TYPE_P (etype)
4860 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4861 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4862 || code == UNCONSTRAINED_ARRAY_TYPE)
4863 {
4864 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4865 {
4866 tree ntype = copy_type (etype);
4867 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4868 TYPE_MAIN_VARIANT (ntype) = ntype;
4869 expr = build1 (NOP_EXPR, ntype, expr);
4870 }
4871
4872 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4873 {
4874 tree rtype = copy_type (type);
4875 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4876 TYPE_MAIN_VARIANT (rtype) = rtype;
4877 expr = convert (rtype, expr);
4878 expr = build1 (NOP_EXPR, type, expr);
4879 }
4880 else
4881 expr = convert (type, expr);
4882 }
4883
4884 /* If we are converting to an integral type whose precision is not equal
4885 to its size, first unchecked convert to a record type that contains a
4886 field of the given precision. Then extract the result from the field.
4887
4888 There is a subtlety if the source type is an aggregate type with reverse
4889 storage order because its representation is not contiguous in the native
4890 storage order, i.e. a direct unchecked conversion to an integral type
4891 with N bits of precision cannot read the first N bits of the aggregate
4892 type. To overcome it, we do an unchecked conversion to an integral type
4893 with reverse storage order and return the resulting value. This also
4894 ensures that the result of the unchecked conversion doesn't depend on
4895 the endianness of the target machine, but only on the storage order of
4896 the aggregate type.
4897
4898 Finally, for the sake of consistency, we do the unchecked conversion
4899 to an integral type with reverse storage order as soon as the source
4900 type is an aggregate type with reverse storage order, even if there
4901 are no considerations of precision or size involved. */
4902 else if (INTEGRAL_TYPE_P (type)
4903 && TYPE_RM_SIZE (type)
4904 && (0 != compare_tree_int (TYPE_RM_SIZE (type),
4905 GET_MODE_BITSIZE (TYPE_MODE (type)))
4906 || (AGGREGATE_TYPE_P (etype)
4907 && TYPE_REVERSE_STORAGE_ORDER (etype))))
4908 {
4909 tree rec_type = make_node (RECORD_TYPE);
4910 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4911 tree field_type, field;
4912
4913 if (AGGREGATE_TYPE_P (etype))
4914 TYPE_REVERSE_STORAGE_ORDER (rec_type)
4915 = TYPE_REVERSE_STORAGE_ORDER (etype);
4916
4917 if (TYPE_UNSIGNED (type))
4918 field_type = make_unsigned_type (prec);
4919 else
4920 field_type = make_signed_type (prec);
4921 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4922
4923 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4924 NULL_TREE, bitsize_zero_node, 1, 0);
4925
4926 finish_record_type (rec_type, field, 1, false);
4927
4928 expr = unchecked_convert (rec_type, expr, notrunc_p);
4929 expr = build_component_ref (expr, field, false);
4930 expr = fold_build1 (NOP_EXPR, type, expr);
4931 }
4932
4933 /* Similarly if we are converting from an integral type whose precision is
4934 not equal to its size, first copy into a field of the given precision
4935 and unchecked convert the record type.
4936
4937 The same considerations as above apply if the target type is an aggregate
4938 type with reverse storage order and we also proceed similarly. */
4939 else if (INTEGRAL_TYPE_P (etype)
4940 && TYPE_RM_SIZE (etype)
4941 && (0 != compare_tree_int (TYPE_RM_SIZE (etype),
4942 GET_MODE_BITSIZE (TYPE_MODE (etype)))
4943 || (AGGREGATE_TYPE_P (type)
4944 && TYPE_REVERSE_STORAGE_ORDER (type))))
4945 {
4946 tree rec_type = make_node (RECORD_TYPE);
4947 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
4948 vec<constructor_elt, va_gc> *v;
4949 vec_alloc (v, 1);
4950 tree field_type, field;
4951
4952 if (AGGREGATE_TYPE_P (type))
4953 TYPE_REVERSE_STORAGE_ORDER (rec_type)
4954 = TYPE_REVERSE_STORAGE_ORDER (type);
4955
4956 if (TYPE_UNSIGNED (etype))
4957 field_type = make_unsigned_type (prec);
4958 else
4959 field_type = make_signed_type (prec);
4960 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4961
4962 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4963 NULL_TREE, bitsize_zero_node, 1, 0);
4964
4965 finish_record_type (rec_type, field, 1, false);
4966
4967 expr = fold_build1 (NOP_EXPR, field_type, expr);
4968 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4969 expr = gnat_build_constructor (rec_type, v);
4970 expr = unchecked_convert (type, expr, notrunc_p);
4971 }
4972
4973 /* If we are converting from a scalar type to a type with a different size,
4974 we need to pad to have the same size on both sides.
4975
4976 ??? We cannot do it unconditionally because unchecked conversions are
4977 used liberally by the front-end to implement polymorphism, e.g. in:
4978
4979 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4980 return p___size__4 (p__object!(S191s.all));
4981
4982 so we skip all expressions that are references. */
4983 else if (!REFERENCE_CLASS_P (expr)
4984 && !AGGREGATE_TYPE_P (etype)
4985 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4986 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4987 {
4988 if (c < 0)
4989 {
4990 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4991 false, false, false, true),
4992 expr);
4993 expr = unchecked_convert (type, expr, notrunc_p);
4994 }
4995 else
4996 {
4997 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4998 false, false, false, true);
4999 expr = unchecked_convert (rec_type, expr, notrunc_p);
5000 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5001 }
5002 }
5003
5004 /* We have a special case when we are converting between two unconstrained
5005 array types. In that case, take the address, convert the fat pointer
5006 types, and dereference. */
5007 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5008 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5009 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5010 build_unary_op (ADDR_EXPR, NULL_TREE,
5011 expr)));
5012
5013 /* Another special case is when we are converting to a vector type from its
5014 representative array type; this is a regular conversion. */
5015 else if (code == VECTOR_TYPE
5016 && ecode == ARRAY_TYPE
5017 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5018 etype))
5019 expr = convert (type, expr);
5020
5021 /* And, if the array type is not the representative, we try to build an
5022 intermediate vector type of which the array type is the representative
5023 and to do the unchecked conversion between the vector types, in order
5024 to enable further simplifications in the middle-end. */
5025 else if (code == VECTOR_TYPE
5026 && ecode == ARRAY_TYPE
5027 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5028 {
5029 expr = convert (tem, expr);
5030 return unchecked_convert (type, expr, notrunc_p);
5031 }
5032
5033 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5034 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5035 else if (TREE_CODE (expr) == CONSTRUCTOR
5036 && code == RECORD_TYPE
5037 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5038 {
5039 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5040 Empty, false, false, false, true),
5041 expr);
5042 return unchecked_convert (type, expr, notrunc_p);
5043 }
5044
5045 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5046 else
5047 {
5048 expr = maybe_unconstrained_array (expr);
5049 etype = TREE_TYPE (expr);
5050 ecode = TREE_CODE (etype);
5051 if (can_fold_for_view_convert_p (expr))
5052 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5053 else
5054 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5055 }
5056
5057 /* If the result is an integral type whose precision is not equal to its
5058 size, sign- or zero-extend the result. We need not do this if the input
5059 is an integral type of the same precision and signedness or if the output
5060 is a biased type or if both the input and output are unsigned. */
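  /* Concretely, assuming a 24-bit RM size held in a 32-bit mode, the code
     below shifts the value left and then right by 8 bits in a base type
     whose signedness is that of TYPE, which sign- or zero-extends the 24
     value bits. */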
5061 if (!notrunc_p
5062 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
5063 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5064 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5065 GET_MODE_BITSIZE (TYPE_MODE (type)))
5066 && !(INTEGRAL_TYPE_P (etype)
5067 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5068 && operand_equal_p (TYPE_RM_SIZE (type),
5069 (TYPE_RM_SIZE (etype) != 0
5070 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5071 0))
5072 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5073 {
5074 tree base_type
5075 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
5076 tree shift_expr
5077 = convert (base_type,
5078 size_binop (MINUS_EXPR,
5079 bitsize_int
5080 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5081 TYPE_RM_SIZE (type)));
5082 expr
5083 = convert (type,
5084 build_binary_op (RSHIFT_EXPR, base_type,
5085 build_binary_op (LSHIFT_EXPR, base_type,
5086 convert (base_type, expr),
5087 shift_expr),
5088 shift_expr));
5089 }
5090
5091 /* An unchecked conversion should never raise Constraint_Error. The code
5092 below assumes that GCC's conversion routines overflow the same way that
5093 the underlying hardware does. This is probably true. In the rare case
5094 when it is false, we can rely on the fact that such conversions are
5095 erroneous anyway. */
5096 if (TREE_CODE (expr) == INTEGER_CST)
5097 TREE_OVERFLOW (expr) = 0;
5098
5099 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5100 mark the expression as no longer constant. */
5101 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5102 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5103 OEP_ONLY_CONST))
5104 TREE_CONSTANT (expr) = 0;
5105
5106 return expr;
5107 }
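
/* As an illustration of the record trick used above for an integral type
   whose precision is not equal to its size, assume a target type with a
   24-bit RM size in a 32-bit mode: a record type with a single 24-bit
   field OBJ at offset 0 is built, the source is unchecked converted to
   that record, OBJ is fetched and the result converted to the target
   type, so the 24 value bits are accessed where the record layout puts
   them rather than where the endianness of the machine would. */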
5108 \f
5109 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5110 the latter being a record type as predicated by Is_Record_Type. */
5111
5112 enum tree_code
5113 tree_code_for_record_type (Entity_Id gnat_type)
5114 {
5115 Node_Id component_list, component;
5116
5117 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5118 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5119 if (!Is_Unchecked_Union (gnat_type))
5120 return RECORD_TYPE;
5121
5122 gnat_type = Implementation_Base_Type (gnat_type);
5123 component_list
5124 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5125
5126 for (component = First_Non_Pragma (Component_Items (component_list));
5127 Present (component);
5128 component = Next_Non_Pragma (component))
5129 if (Ekind (Defining_Entity (component)) == E_Component)
5130 return RECORD_TYPE;
5131
5132 return UNION_TYPE;
5133 }
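
/* For instance, a hypothetical

     type U (D : Boolean := False) is record
        case D is
           when False => X : Integer;
           when True  => Y : Float;
        end case;
     end record;
     pragma Unchecked_Union (U);

   has no component of kind E_Component outside the variant part, so it is
   translated as a UNION_TYPE, whereas adding a component before the case
   part would yield a RECORD_TYPE. */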
5134
5135 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5136 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5137 according to the presence of an alignment clause on the type or, if it
5138 is an array, on the component type. */
5139
5140 bool
5141 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5142 {
5143 gnat_type = Underlying_Type (gnat_type);
5144
5145 *align_clause = Present (Alignment_Clause (gnat_type));
5146
5147 if (Is_Array_Type (gnat_type))
5148 {
5149 gnat_type = Underlying_Type (Component_Type (gnat_type));
5150 if (Present (Alignment_Clause (gnat_type)))
5151 *align_clause = true;
5152 }
5153
5154 if (!Is_Floating_Point_Type (gnat_type))
5155 return false;
5156
5157 if (UI_To_Int (Esize (gnat_type)) != 64)
5158 return false;
5159
5160 return true;
5161 }
5162
5163 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5164 size is greater than or equal to 64 bits, or an array of such a type. Set
5165 ALIGN_CLAUSE according to the presence of an alignment clause on the
5166 type or, if it is an array, on the component type. */
5167
5168 bool
5169 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5170 {
5171 gnat_type = Underlying_Type (gnat_type);
5172
5173 *align_clause = Present (Alignment_Clause (gnat_type));
5174
5175 if (Is_Array_Type (gnat_type))
5176 {
5177 gnat_type = Underlying_Type (Component_Type (gnat_type));
5178 if (Present (Alignment_Clause (gnat_type)))
5179 *align_clause = true;
5180 }
5181
5182 if (!Is_Scalar_Type (gnat_type))
5183 return false;
5184
5185 if (UI_To_Int (Esize (gnat_type)) < 64)
5186 return false;
5187
5188 return true;
5189 }
5190
5191 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5192 component of an aggregate type. */
5193
5194 bool
5195 type_for_nonaliased_component_p (tree gnu_type)
5196 {
5197 /* If the type is passed by reference, we may have pointers to the
5198 component so it cannot be made non-aliased. */
5199 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5200 return false;
5201
5202 /* We used to say that any component of an aggregate type is aliased
5203 because the front-end may take 'Reference of it. The front-end
5204 has been enhanced in the meantime so as to use a renaming instead
5205 in most cases, but the back-end can probably take the address of
5206 such a component too so we go for the conservative stance.
5207
5208 For instance, we might need the address of any array type, even
5209 if normally passed by copy, to construct a fat pointer if the
5210 component is used as an actual for an unconstrained formal.
5211
5212 Likewise for record types: even if a specific record subtype is
5213 passed by copy, the parent type might be passed by ref (e.g. if
5214 it's of variable size) and we might take the address of a child
5215 component to pass to a parent formal. We have no way to check
5216 for such conditions here. */
5217 if (AGGREGATE_TYPE_P (gnu_type))
5218 return false;
5219
5220 return true;
5221 }
5222
5223 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5224
5225 bool
5226 smaller_form_type_p (tree type, tree orig_type)
5227 {
5228 tree size, osize;
5229
5230 /* We're not interested in variants here. */
5231 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5232 return false;
5233
5234 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5235 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5236 return false;
5237
5238 size = TYPE_SIZE (type);
5239 osize = TYPE_SIZE (orig_type);
5240
5241 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5242 return false;
5243
5244 return tree_int_cst_lt (size, osize) != 0;
5245 }
5246
5247 /* Perform final processing on global declarations. */
5248
5249 static GTY (()) tree dummy_global;
5250
5251 void
5252 gnat_write_global_declarations (void)
5253 {
5254 unsigned int i;
5255 tree iter;
5256
5257 /* If we have declared types as used at the global level, insert them in
5258 the global hash table. We use a dummy variable for this purpose, but
5259 we need to build it unconditionally to avoid -fcompare-debug issues. */
5260 if (first_global_object_name)
5261 {
5262 struct varpool_node *node;
5263 char *label;
5264
5265 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5266 dummy_global
5267 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5268 void_type_node);
5269 DECL_HARD_REGISTER (dummy_global) = 1;
5270 TREE_STATIC (dummy_global) = 1;
5271 node = varpool_node::get_create (dummy_global);
5272 node->definition = 1;
5273 node->force_output = 1;
5274
5275 if (types_used_by_cur_var_decl)
5276 while (!types_used_by_cur_var_decl->is_empty ())
5277 {
5278 tree t = types_used_by_cur_var_decl->pop ();
5279 types_used_by_var_decl_insert (t, dummy_global);
5280 }
5281 }
5282
5283 /* Output debug information for all global type declarations first. This
5284 ensures that global types whose compilation hasn't been finalized yet,
5285 for example pointers to Taft amendment types, have their compilation
5286 finalized in the right context. */
5287 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5288 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5289 debug_hooks->type_decl (iter, false);
5290
5291 /* Then output the global variables. We need to do that after the debug
5292 information for global types is emitted so that they are finalized. */
5293 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5294 if (TREE_CODE (iter) == VAR_DECL)
5295 rest_of_decl_compilation (iter, true, 0);
5296
5297 /* Output the imported modules/declarations. In GNAT, these only
5298 materialize subprograms. */
5299 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5300 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5301 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5302 DECL_CONTEXT (iter), 0);
5303 }
5304
5305 /* ************************************************************************
5306 * * GCC builtins support *
5307 * ************************************************************************ */
5308
5309 /* The general scheme is fairly simple:
5310
5311 For each builtin function/type to be declared, gnat_install_builtins calls
5312 internal facilities which eventually get to gnat_pushdecl, which in turn
5313 records the builtin function decls so declared in the 'builtin_decls'
5314 global data structure. When an Intrinsic subprogram declaration is
5315 processed, we search this global data structure to retrieve the
5316 associated BUILT_IN DECL node. */
5317
5318 /* Search the chain of currently available builtin declarations for a node
5319 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5320 found, if any, or NULL_TREE otherwise. */
5321 tree
5322 builtin_decl_for (tree name)
5323 {
5324 unsigned i;
5325 tree decl;
5326
5327 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5328 if (DECL_NAME (decl) == name)
5329 return decl;
5330
5331 return NULL_TREE;
5332 }
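
/* A usage sketch: when an Intrinsic subprogram is bound to, say,
   "__builtin_popcount", gigi is expected to call

     builtin_decl_for (get_identifier ("__builtin_popcount"))

   and get back the decl registered by gnat_install_builtins, or NULL_TREE
   if no such builtin was installed; the particular builtin name is only
   illustrative here. */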
5333
5334 /* The code below eventually exposes gnat_install_builtins, which declares
5335 the builtin types and functions we might need, either internally or as
5336 user accessible facilities.
5337
5338 ??? This is a first implementation shot, still in rough shape. It is
5339 heavily inspired by the "C" family implementation, with chunks copied
5340 verbatim from there.
5341
5342 Two obvious improvement candidates are:
5343 o Use a more efficient name/decl mapping scheme
5344 o Devise a middle-end infrastructure to avoid having to copy
5345 pieces between front-ends. */
5346
5347 /* ----------------------------------------------------------------------- *
5348 * BUILTIN ELEMENTARY TYPES *
5349 * ----------------------------------------------------------------------- */
5350
5351 /* Standard data types to be used in builtin argument declarations. */
5352
5353 enum c_tree_index
5354 {
5355 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5356 CTI_STRING_TYPE,
5357 CTI_CONST_STRING_TYPE,
5358
5359 CTI_MAX
5360 };
5361
5362 static tree c_global_trees[CTI_MAX];
5363
5364 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5365 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5366 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5367
5368 /* ??? In addition to some attribute handlers, we currently don't support a
5369 (small) number of builtin-types, which in turn inhibits support for a
5370 number of builtin functions. */
5371 #define wint_type_node void_type_node
5372 #define intmax_type_node void_type_node
5373 #define uintmax_type_node void_type_node
5374
5375 /* Build the void_list_node (void_type_node having been created). */
5376
5377 static tree
5378 build_void_list_node (void)
5379 {
5380 tree t = build_tree_list (NULL_TREE, void_type_node);
5381 return t;
5382 }
5383
5384 /* Used to help initialize the builtin-types.def table. When a type of
5385 the correct size doesn't exist, use error_mark_node instead of NULL.
5386 The latter results in segfaults even when a decl using the type doesn't
5387 get invoked. */
5388
5389 static tree
5390 builtin_type_for_size (int size, bool unsignedp)
5391 {
5392 tree type = gnat_type_for_size (size, unsignedp);
5393 return type ? type : error_mark_node;
5394 }
5395
5396 /* Build/push the elementary type decls that builtin functions/types
5397 will need. */
5398
5399 static void
5400 install_builtin_elementary_types (void)
5401 {
5402 signed_size_type_node = gnat_signed_type (size_type_node);
5403 pid_type_node = integer_type_node;
5404 void_list_node = build_void_list_node ();
5405
5406 string_type_node = build_pointer_type (char_type_node);
5407 const_string_type_node
5408 = build_pointer_type (build_qualified_type
5409 (char_type_node, TYPE_QUAL_CONST));
5410 }
5411
5412 /* ----------------------------------------------------------------------- *
5413 * BUILTIN FUNCTION TYPES *
5414 * ----------------------------------------------------------------------- */
5415
5416 /* Now, builtin function types per se. */
5417
5418 enum c_builtin_type
5419 {
5420 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5421 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5422 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5423 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5424 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5425 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5426 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5427 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5428 ARG6) NAME,
5429 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5430 ARG6, ARG7) NAME,
5431 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5432 ARG6, ARG7, ARG8) NAME,
5433 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5434 ARG6, ARG7, ARG8, ARG9) NAME,
5435 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5436 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5437 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5438 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5439 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5440 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5441 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5442 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5443 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5444 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5445 NAME,
5446 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5447 ARG6) NAME,
5448 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5449 ARG6, ARG7) NAME,
5450 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5451 #include "builtin-types.def"
5452 #undef DEF_PRIMITIVE_TYPE
5453 #undef DEF_FUNCTION_TYPE_0
5454 #undef DEF_FUNCTION_TYPE_1
5455 #undef DEF_FUNCTION_TYPE_2
5456 #undef DEF_FUNCTION_TYPE_3
5457 #undef DEF_FUNCTION_TYPE_4
5458 #undef DEF_FUNCTION_TYPE_5
5459 #undef DEF_FUNCTION_TYPE_6
5460 #undef DEF_FUNCTION_TYPE_7
5461 #undef DEF_FUNCTION_TYPE_8
5462 #undef DEF_FUNCTION_TYPE_9
5463 #undef DEF_FUNCTION_TYPE_10
5464 #undef DEF_FUNCTION_TYPE_11
5465 #undef DEF_FUNCTION_TYPE_VAR_0
5466 #undef DEF_FUNCTION_TYPE_VAR_1
5467 #undef DEF_FUNCTION_TYPE_VAR_2
5468 #undef DEF_FUNCTION_TYPE_VAR_3
5469 #undef DEF_FUNCTION_TYPE_VAR_4
5470 #undef DEF_FUNCTION_TYPE_VAR_5
5471 #undef DEF_FUNCTION_TYPE_VAR_6
5472 #undef DEF_FUNCTION_TYPE_VAR_7
5473 #undef DEF_POINTER_TYPE
5474 BT_LAST
5475 };
5476
5477 typedef enum c_builtin_type builtin_type;
5478
5479 /* A temporary array used in communication with def_fn_type. */
5480 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5481
5482 /* A helper function for install_builtin_function_types. Build the function
5483 for DEF with return type RET and N arguments. If VAR is true, then the
5484 function should be variadic after those N arguments.
5485
5486 Takes special care not to ICE if any of the types involved are
5487 error_mark_node, which indicates that said type is not in fact available
5488 (see builtin_type_for_size), in which case the function type as a whole
5489 should be error_mark_node. */
5490
5491 static void
5492 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5493 {
5494 tree t;
5495 tree *args = XALLOCAVEC (tree, n);
5496 va_list list;
5497 int i;
5498
5499 va_start (list, n);
5500 for (i = 0; i < n; ++i)
5501 {
5502 builtin_type a = (builtin_type) va_arg (list, int);
5503 t = builtin_types[a];
5504 if (t == error_mark_node)
5505 goto egress;
5506 args[i] = t;
5507 }
5508
5509 t = builtin_types[ret];
5510 if (t == error_mark_node)
5511 goto egress;
5512 if (var)
5513 t = build_varargs_function_type_array (t, n, args);
5514 else
5515 t = build_function_type_array (t, n, args);
5516
5517 egress:
5518 builtin_types[def] = t;
5519 va_end (list);
5520 }
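
/* As an illustration, a builtin-types.def entry such as

     DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)

   is expanded by install_builtin_function_types below into

     def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1, BT_INT);

   i.e. a non-variadic function type taking one int and returning int. */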
5521
5522 /* Build the builtin function types and install them in the builtin_types
5523 array for later use in builtin function decls. */
5524
5525 static void
5526 install_builtin_function_types (void)
5527 {
5528 tree va_list_ref_type_node;
5529 tree va_list_arg_type_node;
5530
5531 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5532 {
5533 va_list_arg_type_node = va_list_ref_type_node =
5534 build_pointer_type (TREE_TYPE (va_list_type_node));
5535 }
5536 else
5537 {
5538 va_list_arg_type_node = va_list_type_node;
5539 va_list_ref_type_node = build_reference_type (va_list_type_node);
5540 }
5541
5542 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5543 builtin_types[ENUM] = VALUE;
5544 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5545 def_fn_type (ENUM, RETURN, 0, 0);
5546 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5547 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5548 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5549 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5550 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5551 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5552 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5553 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5554 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5555 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5556 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5557 ARG6) \
5558 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5559 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5560 ARG6, ARG7) \
5561 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5562 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5563 ARG6, ARG7, ARG8) \
5564 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5565 ARG7, ARG8);
5566 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5567 ARG6, ARG7, ARG8, ARG9) \
5568 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5569 ARG7, ARG8, ARG9);
5570 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5571 ARG6, ARG7, ARG8, ARG9, ARG10) \
5572 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5573 ARG7, ARG8, ARG9, ARG10);
5574 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5575 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5576 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5577 ARG7, ARG8, ARG9, ARG10, ARG11);
5578 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5579 def_fn_type (ENUM, RETURN, 1, 0);
5580 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5581 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5582 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5583 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5584 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5585 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5586 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5587 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5588 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5589 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5590 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5591 ARG6) \
5592 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5593 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5594 ARG6, ARG7) \
5595 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5596 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5597 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5598
5599 #include "builtin-types.def"
5600
5601 #undef DEF_PRIMITIVE_TYPE
5602 #undef DEF_FUNCTION_TYPE_0
5603 #undef DEF_FUNCTION_TYPE_1
5604 #undef DEF_FUNCTION_TYPE_2
5605 #undef DEF_FUNCTION_TYPE_3
5606 #undef DEF_FUNCTION_TYPE_4
5607 #undef DEF_FUNCTION_TYPE_5
5608 #undef DEF_FUNCTION_TYPE_6
5609 #undef DEF_FUNCTION_TYPE_7
5610 #undef DEF_FUNCTION_TYPE_8
5611 #undef DEF_FUNCTION_TYPE_9
5612 #undef DEF_FUNCTION_TYPE_10
5613 #undef DEF_FUNCTION_TYPE_11
5614 #undef DEF_FUNCTION_TYPE_VAR_0
5615 #undef DEF_FUNCTION_TYPE_VAR_1
5616 #undef DEF_FUNCTION_TYPE_VAR_2
5617 #undef DEF_FUNCTION_TYPE_VAR_3
5618 #undef DEF_FUNCTION_TYPE_VAR_4
5619 #undef DEF_FUNCTION_TYPE_VAR_5
5620 #undef DEF_FUNCTION_TYPE_VAR_6
5621 #undef DEF_FUNCTION_TYPE_VAR_7
5622 #undef DEF_POINTER_TYPE
5623 builtin_types[(int) BT_LAST] = NULL_TREE;
5624 }
5625
5626 /* ----------------------------------------------------------------------- *
5627 * BUILTIN ATTRIBUTES *
5628 * ----------------------------------------------------------------------- */
5629
5630 enum built_in_attribute
5631 {
5632 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5633 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5634 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5635 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5636 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5637 #include "builtin-attrs.def"
5638 #undef DEF_ATTR_NULL_TREE
5639 #undef DEF_ATTR_INT
5640 #undef DEF_ATTR_STRING
5641 #undef DEF_ATTR_IDENT
5642 #undef DEF_ATTR_TREE_LIST
5643 ATTR_LAST
5644 };
5645
5646 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5647
5648 static void
5649 install_builtin_attributes (void)
5650 {
5651 /* Fill in the built_in_attributes array. */
5652 #define DEF_ATTR_NULL_TREE(ENUM) \
5653 built_in_attributes[(int) ENUM] = NULL_TREE;
5654 #define DEF_ATTR_INT(ENUM, VALUE) \
5655 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5656 #define DEF_ATTR_STRING(ENUM, VALUE) \
5657 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5658 #define DEF_ATTR_IDENT(ENUM, STRING) \
5659 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5660 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5661 built_in_attributes[(int) ENUM] \
5662 = tree_cons (built_in_attributes[(int) PURPOSE], \
5663 built_in_attributes[(int) VALUE], \
5664 built_in_attributes[(int) CHAIN]);
5665 #include "builtin-attrs.def"
5666 #undef DEF_ATTR_NULL_TREE
5667 #undef DEF_ATTR_INT
5668 #undef DEF_ATTR_STRING
5669 #undef DEF_ATTR_IDENT
5670 #undef DEF_ATTR_TREE_LIST
5671 }
5672
5673 /* Handle a "const" attribute; arguments as in
5674 struct attribute_spec.handler. */
5675
5676 static tree
5677 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5678 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5679 bool *no_add_attrs)
5680 {
5681 if (TREE_CODE (*node) == FUNCTION_DECL)
5682 TREE_READONLY (*node) = 1;
5683 else
5684 *no_add_attrs = true;
5685
5686 return NULL_TREE;
5687 }
5688
5689 /* Handle a "nothrow" attribute; arguments as in
5690 struct attribute_spec.handler. */
5691
5692 static tree
5693 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5694 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5695 bool *no_add_attrs)
5696 {
5697 if (TREE_CODE (*node) == FUNCTION_DECL)
5698 TREE_NOTHROW (*node) = 1;
5699 else
5700 *no_add_attrs = true;
5701
5702 return NULL_TREE;
5703 }
5704
5705 /* Handle a "pure" attribute; arguments as in
5706 struct attribute_spec.handler. */
5707
5708 static tree
5709 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5710 int ARG_UNUSED (flags), bool *no_add_attrs)
5711 {
5712 if (TREE_CODE (*node) == FUNCTION_DECL)
5713 DECL_PURE_P (*node) = 1;
5714 /* TODO: support types. */
5715 else
5716 {
5717 warning (OPT_Wattributes, "%qs attribute ignored",
5718 IDENTIFIER_POINTER (name));
5719 *no_add_attrs = true;
5720 }
5721
5722 return NULL_TREE;
5723 }
5724
5725 /* Handle a "no vops" attribute; arguments as in
5726 struct attribute_spec.handler. */
5727
5728 static tree
5729 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5730 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5731 bool *ARG_UNUSED (no_add_attrs))
5732 {
5733 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5734 DECL_IS_NOVOPS (*node) = 1;
5735 return NULL_TREE;
5736 }
5737
5738 /* Helper for nonnull attribute handling; fetch the operand number
5739 from the attribute argument list. */
5740
5741 static bool
5742 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5743 {
5744 /* Verify the arg number is a constant. */
5745 if (!tree_fits_uhwi_p (arg_num_expr))
5746 return false;
5747
5748 *valp = TREE_INT_CST_LOW (arg_num_expr);
5749 return true;
5750 }
5751
5752 /* Handle the "nonnull" attribute. */
5753 static tree
5754 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5755 tree args, int ARG_UNUSED (flags),
5756 bool *no_add_attrs)
5757 {
5758 tree type = *node;
5759 unsigned HOST_WIDE_INT attr_arg_num;
5760
5761 /* If no arguments are specified, all pointer arguments should be
5762 non-null. Verify a full prototype is given so that the arguments
5763 will have the correct types when we actually check them later. */
5764 if (!args)
5765 {
5766 if (!prototype_p (type))
5767 {
5768 error ("nonnull attribute without arguments on a non-prototype");
5769 *no_add_attrs = true;
5770 }
5771 return NULL_TREE;
5772 }
5773
5774 /* Argument list specified. Verify that each argument number references
5775 a pointer argument. */
5776 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5777 {
5778 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5779
5780 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5781 {
5782 error ("nonnull argument has invalid operand number (argument %lu)",
5783 (unsigned long) attr_arg_num);
5784 *no_add_attrs = true;
5785 return NULL_TREE;
5786 }
5787
5788 if (prototype_p (type))
5789 {
5790 function_args_iterator iter;
5791 tree argument;
5792
5793 function_args_iter_init (&iter, type);
5794 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5795 {
5796 argument = function_args_iter_cond (&iter);
5797 if (!argument || ck_num == arg_num)
5798 break;
5799 }
5800
5801 if (!argument
5802 || TREE_CODE (argument) == VOID_TYPE)
5803 {
5804 error ("nonnull argument with out-of-range operand number "
5805 "(argument %lu, operand %lu)",
5806 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5807 *no_add_attrs = true;
5808 return NULL_TREE;
5809 }
5810
5811 if (TREE_CODE (argument) != POINTER_TYPE)
5812 {
5813 error ("nonnull argument references non-pointer operand "
5814 "(argument %lu, operand %lu)",
5815 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5816 *no_add_attrs = true;
5817 return NULL_TREE;
5818 }
5819 }
5820 }
5821
5822 return NULL_TREE;
5823 }
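
/* For example, on a declaration along the lines of

     extern void *my_copy (void *, const void *, int)
       __attribute__((nonnull (1, 2)));

   the handler above checks that operand numbers 1 and 2 designate pointer
   arguments of the prototype; my_copy is purely illustrative. */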
5824
5825 /* Handle a "sentinel" attribute. */
5826
5827 static tree
5828 handle_sentinel_attribute (tree *node, tree name, tree args,
5829 int ARG_UNUSED (flags), bool *no_add_attrs)
5830 {
5831 if (!prototype_p (*node))
5832 {
5833 warning (OPT_Wattributes,
5834 "%qs attribute requires prototypes with named arguments",
5835 IDENTIFIER_POINTER (name));
5836 *no_add_attrs = true;
5837 }
5838 else
5839 {
5840 if (!stdarg_p (*node))
5841 {
5842 warning (OPT_Wattributes,
5843 "%qs attribute only applies to variadic functions",
5844 IDENTIFIER_POINTER (name));
5845 *no_add_attrs = true;
5846 }
5847 }
5848
5849 if (args)
5850 {
5851 tree position = TREE_VALUE (args);
5852
5853 if (TREE_CODE (position) != INTEGER_CST)
5854 {
5855 warning (0, "requested position is not an integer constant");
5856 *no_add_attrs = true;
5857 }
5858 else
5859 {
5860 if (tree_int_cst_lt (position, integer_zero_node))
5861 {
5862 warning (0, "requested position is less than zero");
5863 *no_add_attrs = true;
5864 }
5865 }
5866 }
5867
5868 return NULL_TREE;
5869 }
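
/* For example, a declaration along the lines of

     extern int my_execl (const char *, ...)
       __attribute__((sentinel));

   requires calls to pass a NULL as their last argument; the optional
   integer argument, when given, designates the position of the sentinel
   counting backwards from the end of the argument list. my_execl is
   purely illustrative. */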
5870
5871 /* Handle a "noreturn" attribute; arguments as in
5872 struct attribute_spec.handler. */
5873
5874 static tree
5875 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5876 int ARG_UNUSED (flags), bool *no_add_attrs)
5877 {
5878 tree type = TREE_TYPE (*node);
5879
5880 /* See FIXME comment in c_common_attribute_table. */
5881 if (TREE_CODE (*node) == FUNCTION_DECL)
5882 TREE_THIS_VOLATILE (*node) = 1;
5883 else if (TREE_CODE (type) == POINTER_TYPE
5884 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5885 TREE_TYPE (*node)
5886 = build_pointer_type
5887 (build_type_variant (TREE_TYPE (type),
5888 TYPE_READONLY (TREE_TYPE (type)), 1));
5889 else
5890 {
5891 warning (OPT_Wattributes, "%qs attribute ignored",
5892 IDENTIFIER_POINTER (name));
5893 *no_add_attrs = true;
5894 }
5895
5896 return NULL_TREE;
5897 }
5898
5899 /* Handle a "leaf" attribute; arguments as in
5900 struct attribute_spec.handler. */
5901
5902 static tree
5903 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5904 int ARG_UNUSED (flags), bool *no_add_attrs)
5905 {
5906 if (TREE_CODE (*node) != FUNCTION_DECL)
5907 {
5908 warning (OPT_Wattributes, "%qE attribute ignored", name);
5909 *no_add_attrs = true;
5910 }
5911 if (!TREE_PUBLIC (*node))
5912 {
5913 warning (OPT_Wattributes, "%qE attribute has no effect", name);
5914 *no_add_attrs = true;
5915 }
5916
5917 return NULL_TREE;
5918 }
5919
5920 /* Handle an "always_inline" attribute; arguments as in
5921 struct attribute_spec.handler. */
5922
5923 static tree
5924 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5925 int ARG_UNUSED (flags), bool *no_add_attrs)
5926 {
5927 if (TREE_CODE (*node) == FUNCTION_DECL)
5928 {
5929 /* Set the attribute and mark it for disregarding inline limits. */
5930 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5931 }
5932 else
5933 {
5934 warning (OPT_Wattributes, "%qE attribute ignored", name);
5935 *no_add_attrs = true;
5936 }
5937
5938 return NULL_TREE;
5939 }
5940
5941 /* Handle a "malloc" attribute; arguments as in
5942 struct attribute_spec.handler. */
5943
5944 static tree
5945 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5946 int ARG_UNUSED (flags), bool *no_add_attrs)
5947 {
5948 if (TREE_CODE (*node) == FUNCTION_DECL
5949 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5950 DECL_IS_MALLOC (*node) = 1;
5951 else
5952 {
5953 warning (OPT_Wattributes, "%qs attribute ignored",
5954 IDENTIFIER_POINTER (name));
5955 *no_add_attrs = true;
5956 }
5957
5958 return NULL_TREE;
5959 }
5960
5961 /* Fake handler for attributes we don't properly support. */
5962
5963 tree
5964 fake_attribute_handler (tree * ARG_UNUSED (node),
5965 tree ARG_UNUSED (name),
5966 tree ARG_UNUSED (args),
5967 int ARG_UNUSED (flags),
5968 bool * ARG_UNUSED (no_add_attrs))
5969 {
5970 return NULL_TREE;
5971 }
5972
5973 /* Handle a "type_generic" attribute. */
5974
5975 static tree
5976 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5977 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5978 bool * ARG_UNUSED (no_add_attrs))
5979 {
5980 /* Ensure we have a function type. */
5981 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5982
5983 /* Ensure we have a variadic function. */
5984 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
5985
5986 return NULL_TREE;
5987 }
5988
5989 /* Handle a "vector_size" attribute; arguments as in
5990 struct attribute_spec.handler. */
5991
5992 static tree
5993 handle_vector_size_attribute (tree *node, tree name, tree args,
5994 int ARG_UNUSED (flags), bool *no_add_attrs)
5995 {
5996 tree type = *node;
5997 tree vector_type;
5998
5999 *no_add_attrs = true;
6000
6001 /* We need to provide for vector pointers, vector arrays, and
6002 functions returning vectors. For example:
6003
6004 __attribute__((vector_size(16))) short *foo;
6005
6006 In this case, the mode is SI, but the type being modified is
6007 HI, so we need to look further. */
6008 while (POINTER_TYPE_P (type)
6009 || TREE_CODE (type) == FUNCTION_TYPE
6010 || TREE_CODE (type) == ARRAY_TYPE)
6011 type = TREE_TYPE (type);
6012
6013 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6014 if (!vector_type)
6015 return NULL_TREE;
6016
6017 /* Build back pointers if needed. */
6018 *node = reconstruct_complex_type (*node, vector_type);
6019
6020 return NULL_TREE;
6021 }
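
/* For instance, given

     __attribute__((vector_size (16))) int *v;

   the type being modified is the pointed-to int, not the pointer, which is
   why the loop above digs through pointer, array and function types before
   building the 4-element vector type and reconstructing the pointer around
   it. */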
6022
6023 /* Handle a "vector_type" attribute; arguments as in
6024 struct attribute_spec.handler. */
6025
6026 static tree
6027 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6028 int ARG_UNUSED (flags), bool *no_add_attrs)
6029 {
6030 tree type = *node;
6031 tree vector_type;
6032
6033 *no_add_attrs = true;
6034
6035 if (TREE_CODE (type) != ARRAY_TYPE)
6036 {
6037 error ("attribute %qs applies to array types only",
6038 IDENTIFIER_POINTER (name));
6039 return NULL_TREE;
6040 }
6041
6042 vector_type = build_vector_type_for_array (type, name);
6043 if (!vector_type)
6044 return NULL_TREE;
6045
6046 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6047 *node = vector_type;
6048
6049 return NULL_TREE;
6050 }
6051
6052 /* ----------------------------------------------------------------------- *
6053 * BUILTIN FUNCTIONS *
6054 * ----------------------------------------------------------------------- */
6055
6056 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6057 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6058 if nonansi_p and flag_no_nonansi_builtin. */
6059
6060 static void
6061 def_builtin_1 (enum built_in_function fncode,
6062 const char *name,
6063 enum built_in_class fnclass,
6064 tree fntype, tree libtype,
6065 bool both_p, bool fallback_p,
6066 bool nonansi_p ATTRIBUTE_UNUSED,
6067 tree fnattrs, bool implicit_p)
6068 {
6069 tree decl;
6070 const char *libname;
6071
6072 /* Preserve an already installed decl. It most likely was set up in advance
6073 (e.g. as part of the internal builtins) for specific reasons. */
6074 if (builtin_decl_explicit (fncode) != NULL_TREE)
6075 return;
6076
6077 gcc_assert ((!both_p && !fallback_p)
6078 || !strncmp (name, "__builtin_",
6079 strlen ("__builtin_")));
6080
6081 libname = name + strlen ("__builtin_");
6082 decl = add_builtin_function (name, fntype, fncode, fnclass,
6083 (fallback_p ? libname : NULL),
6084 fnattrs);
6085 if (both_p)
6086 /* ??? This is normally further controlled by command-line options
6087 like -fno-builtin, but we don't have them for Ada. */
6088 add_builtin_function (libname, libtype, fncode, fnclass,
6089 NULL, fnattrs);
6090
6091 set_builtin_decl (fncode, decl, implicit_p);
6092 }
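
/* As an illustration, for a builtins.def entry describing memcpy with
   BOTH_P and FALLBACK_P set, the code above installs "__builtin_memcpy"
   with library name "memcpy" and additionally declares the plain "memcpy"
   form, since Ada has no -fno-builtin family of options to control this. */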
6093
6094 static int flag_isoc94 = 0;
6095 static int flag_isoc99 = 0;
6096 static int flag_isoc11 = 0;
6097
6098 /* Install what the common builtins.def offers. */
6099
6100 static void
6101 install_builtin_functions (void)
6102 {
6103 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6104 NONANSI_P, ATTRS, IMPLICIT, COND) \
6105 if (NAME && COND) \
6106 def_builtin_1 (ENUM, NAME, CLASS, \
6107 builtin_types[(int) TYPE], \
6108 builtin_types[(int) LIBTYPE], \
6109 BOTH_P, FALLBACK_P, NONANSI_P, \
6110 built_in_attributes[(int) ATTRS], IMPLICIT);
6111 #include "builtins.def"
6112 }
6113
6114 /* ----------------------------------------------------------------------- *
6115 * BUILTIN FUNCTIONS *
6116 * ----------------------------------------------------------------------- */
6117
6118 /* Install the builtin functions we might need. */
6119
6120 void
6121 gnat_install_builtins (void)
6122 {
6123 install_builtin_elementary_types ();
6124 install_builtin_function_types ();
6125 install_builtin_attributes ();
6126
6127 /* Install builtins used by generic middle-end pieces first. Some of these
6128 know about internal specificities and control attributes accordingly, for
6129 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6130 the generic definition from builtins.def. */
6131 build_common_builtin_nodes ();
6132
6133 /* Now, install the target specific builtins, such as the AltiVec family on
6134 ppc, and the common set as exposed by builtins.def. */
6135 targetm.init_builtins ();
6136 install_builtin_functions ();
6137 }
6138
6139 #include "gt-ada-utils.h"
6140 #include "gtype-ada.h"