/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2019, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "const",        0, 0,  true,  false, false, false,
    handle_const_attribute, NULL },
  { "nothrow",      0, 0,  true,  false, false, false,
    handle_nothrow_attribute, NULL },
  { "pure",         0, 0,  true,  false, false, false,
    handle_pure_attribute, NULL },
  { "no vops",      0, 0,  true,  false, false, false,
    handle_novops_attribute, NULL },
  { "nonnull",      0, -1, false, true,  true,  false,
    handle_nonnull_attribute, NULL },
  { "sentinel",     0, 1,  false, true,  true,  false,
    handle_sentinel_attribute, NULL },
  { "noreturn",     0, 0,  true,  false, false, false,
    handle_noreturn_attribute, NULL },
  { "noinline",     0, 0,  true,  false, false, false,
    handle_noinline_attribute, NULL },
  { "noclone",      0, 0,  true,  false, false, false,
    handle_noclone_attribute, NULL },
  { "leaf",         0, 0,  true,  false, false, false,
    handle_leaf_attribute, NULL },
  { "always_inline",0, 0,  true,  false, false, false,
    handle_always_inline_attribute, NULL },
  { "malloc",       0, 0,  true,  false, false, false,
    handle_malloc_attribute, NULL },
  { "type generic", 0, 0,  false, true,  true,  false,
    handle_type_generic_attribute, NULL },

  { "vector_size",  1, 1,  false, true,  false, false,
    handle_vector_size_attribute, NULL },
  { "vector_type",  0, 0,  false, true,  false, false,
    handle_vector_type_attribute, NULL },
  { "may_alias",    0, 0,  false, true,  false, false, NULL, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  false, fake_attribute_handler,
    NULL },
  { "format_arg",   1, 1,  false, true,  true,  false, fake_attribute_handler,
    NULL },

  { NULL,           0, 0,  false, false, false, false, NULL, NULL }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
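
/* Added usage note (not in the original source): after
   SET_GNU_TREE (gnat_entity, gnu_decl), PRESENT_GNU_TREE (gnat_entity)
   is true and GET_GNU_TREE (gnat_entity) yields gnu_decl again; the
   save_gnu_tree/get_gnu_tree/present_gnu_tree functions below wrap this
   pattern with sanity checks.  */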

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash
{
  hashval_t hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);

  static int
  keep_cache_entry (pad_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree fold_bit_position (const_tree);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on.  */
  tree decl;

  /* The corresponding entity's Scope.  */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed.  */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to.  */
  vec<tree> types;

  /* The next queue item.  */
  struct deferred_decl_context_node *next;
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
                                       tree type);
\f
/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}
\f
/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
\f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type, debug_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use it; otherwise, make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
                        ? tree_code_for_record_type (gnat_equiv)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  /* Create a debug type so that debuggers only see an unspecified type.  */
  if (Needs_Debug_Info (gnat_type))
    {
      debug_type = make_node (LANG_TYPE);
      TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
      TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
      SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
    }

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */
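
/* Added note (not in the original source): the fat pointer built below has
   the shape { P_ARRAY => pointer to the array data, P_BOUNDS => pointer to
   the bounds template }, matching the two create_field_decl calls in the
   function body; thin pointers designate the XUT object record attached
   last.  */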

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
\f
/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || !current_function_decl;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block; otherwise, chain it to the subblocks of
     its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
\f
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
         Some parallel types seem to be present in multiple parallel type
         chains, so don't mess with their context if they already have
         one.  */
      if (!TYPE_CONTEXT (parallel_type))
        {
          if (TYPE_STUB_DECL (parallel_type))
            DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
          TYPE_CONTEXT (parallel_type) = context;
        }

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for debug info purposes, or Empty if there is no such scope.  If not
   NULL, set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
        {
        case E_Function:
        case E_Procedure:
          if (Present (Protected_Body_Subprogram (gnat_entity)))
            gnat_entity = Protected_Body_Subprogram (gnat_entity);

          /* If the scope is a subprogram, then just rely on
             current_function_decl, so that we don't have to defer
             anything.  This is needed because other places rely on the
             validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
          if (is_subprogram)
            *is_subprogram = true;
          return gnat_entity;

        case E_Record_Type:
        case E_Record_Subtype:
          return gnat_entity;

        default:
          /* By default, we are not interested in this particular scope: go to
             the outer one.  */
          break;
        }

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type, tree context,
                           struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, but create it first if need be.  */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context
        = build_translation_unit_decl (get_identifier (main_input_filename));
      debug_hooks->register_main_translation_unit (global_context);
    }

  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to the current_function_decl
         if the GNAT scope reached the global scope, if it reached a
         subprogram or the declaration is a subprogram or a variable (for
         them we skip intermediate context types because the subprogram body
         elaboration machinery and the inliner both expect a subprogram
         context).

         Falling back to current_function_decl is necessary for implicit
         subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
          && !context_is_subprogram
          && TREE_CODE (decl) != FUNCTION_DECL
          && TREE_CODE (decl) != VAR_DECL)
        /* Always assume the scope has not been elaborated, thus defer the
           context propagation to the time its elaboration will be
           available.  */
        deferred_decl_context
          = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
         local context.  */
      else if (current_function_decl && force_global == 0)
        context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope
     and the current_function_decl did not help in determining the context,
     use the global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
          || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      /* External declarations must go to the binding level they belong to.
         This ensures that the corresponding imported entities are available
         in the debugger at the proper time.  */
      if (DECL_EXTERNAL (decl)
          && TREE_CODE (decl) == FUNCTION_DECL
          && fndecl_built_in_p (decl))
        vec_safe_push (builtin_decls, decl);
      else if (global_bindings_p ())
        vec_safe_push (global_decls, decl);
      else
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
         to generate a typedef in DWARF for them and make sure it is preserved,
         unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
          && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
              || DECL_ARTIFICIAL (decl)))
        ;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
         generate the typedef in DWARF.  Also do that for fat pointer types
         because, even though they are tagged types in the C sense, they are
         still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
               && (TREE_CODE (t) == ARRAY_TYPE
                   || TREE_CODE (t) == POINTER_TYPE
                   || TYPE_IS_FAT_POINTER_P (t)))
        {
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          defer_or_set_type_context (tt,
                                     DECL_CONTEXT (decl),
                                     deferred_decl_context);
          TREE_TYPE (decl) = tt;
          if (TYPE_NAME (t)
              && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
              && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          /* Array types need to have a name so that they can be related to
             their GNAT encodings.  */
          if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
            TYPE_NAME (t) = DECL_NAME (decl);
          t = NULL_TREE;
        }
      else if (TYPE_NAME (t)
               && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
               && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
         qualifiers machinery to work properly (see check_qualified_type).
         Also propagate the context to them.  Note that it will be propagated
         to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          /* ??? Because of the previous kludge, we can have variants of fat
             pointer types with different names.  */
          if (!(TYPE_IS_FAT_POINTER_P (t)
                && TYPE_NAME (t)
                && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            {
              TYPE_NAME (t) = decl;
              defer_or_set_type_context (t,
                                         DECL_CONTEXT (decl),
                                         deferred_decl_context);
            }
    }
}
\f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
                    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                            <--------- pos ---------->
                {          +------------+-------------+-----------------+
      record  =>{          |############|     ...     | field (type)    |
                {          +------------+-------------+-----------------+
                           |<-- room -->|<- voffset ->|<---- size ----->|
                           o            o
                           |            |
                           record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
                           fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
                           size_int ((align / BITS_PER_UNIT) - 1));
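
  /* Added worked example (not in the original source): with
     BITS_PER_UNIT == 8 and ALIGN == 64, the mask is 64 / 8 - 1 = 7; if
     vblock_addr is 21, then voffset = (-21) & 7 = 3 and the field starts
     at byte 21 + 3 = 24, the next multiple of 8.  */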

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
                    convert (bitsizetype,
                             size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
                             pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
                  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
                  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int align = TYPE_ALIGN (type);
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_PACKED (new_type) = 1;
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
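
      /* Added illustration (not in the original source): a 20-bit record
         gets new_size = ceil_pow2 (20) = 32 and a 32-bit alignment, so
         layout can assign it a 32-bit integral mode where the target
         provides one.  */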
    }
  else
    {
      tree type_size = TYPE_ADA_SIZE (type);
      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
          || !tree_fits_uhwi_p (type_size))
        return type;

      /* Round the RM size up to a unit boundary to get the minimal size
         for a BLKmode record.  Give up if it's already the size and we
         don't need to lower the alignment.  */
      new_size = tree_to_uhwi (type_size);
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size && (max_align == 0 || align <= max_align))
        return type;

      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
      if (max_align > 0 && new_align > max_align)
        new_align = max_align;
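
      /* Added illustration (not in the original source): with
         BITS_PER_UNIT == 8, a 17-bit RM size rounds up to new_size = 24;
         new_size & -new_size then isolates the lowest set bit, 24 & -24
         = 8, so the shrunk type gets at most byte alignment.  */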
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      tree new_field, new_field_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
        new_field_type = make_packable_type (new_field_type, true, max_align);

      /* However, for the last field in a not already packed record type
         that is of an aggregate type, we need to use the RM size in the
         packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
          && !TYPE_PACKED (type)
          && RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
          && TYPE_ADA_SIZE (new_field_type))
        new_field_size = TYPE_ADA_SIZE (new_field_type);
      else
        new_field_size = DECL_SIZE (field);

      /* This is a layout with full representation, alignment and size clauses
         so we simply pass 0 as PACKED like gnat_to_gnu_field in this case.  */
      new_field
        = create_field_decl (DECL_NAME (field), new_field_type, new_type,
                             new_field_size, bit_position (field), 0,
                             !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
        DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (new_type, TYPE_DEBUG_TYPE (type));
  else if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
                            DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
                   mode_for_size_tree (TYPE_SIZE (new_type),
                                       MODE_INT, 1).else_blk ());

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  return new_type;
}

/* Return true if TYPE has an unsigned representation.  This needs to be used
   when the representation of types whose precision is not equal to their size
   is manipulated based on the RM size.  */

static inline bool
type_unsigned_for_rm (tree type)
{
  /* This is the common case.  */
  if (TYPE_UNSIGNED (type))
    return true;

  /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
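  /* Added illustration (not in the original source): a subtype declared as
     "range 0 .. 127" keeps a signed base type in GCC, but its constant
     non-negative lower bound shows that no sign bit is ever needed, so it
     is unsigned for RM purposes.  */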
  if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
      && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
    return true;

  return false;
}

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */
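
/* Added illustration (not in the original source): a 2-bit biased type for
   the range 7 .. 10 stores VALUE - 7, so 7 is encoded as 00 and 10 as 11.  */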

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case BOOLEAN_TYPE:
      /* Do not mess with boolean types that have foreign convention.  */
      if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
        break;

      /* ... fall through ... */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
                  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
        size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
         already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
          || (TYPE_PRECISION (type) == size && biased_p == for_biased)
          || size > LONG_LONG_TYPE_SIZE)
        break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
         or if the lower bound is constant and non-negative or if the type is
         biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
      if (type_unsigned_for_rm (type) || biased_p)
        new_type = make_unsigned_type (size);
      else
        new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
         not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
         may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
        {
          scalar_int_mode p_mode;
          if (!int_mode_for_size (size, 0).exists (&p_mode)
              || !targetm.valid_pointer_mode (p_mode))
            p_mode = ptr_mode;
          return
            build_pointer_type_for_mode
              (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
               p_mode, 0);
        }
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
         may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
        return
          build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}

/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment, RM size and storage order.
     Taking the mode into account is redundant since it is determined by
     the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Compute the hash value for the padded TYPE.  */

static hashval_t
hash_pad_type (tree type)
{
  hashval_t hashcode;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  return hashcode;
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
canonicalize_pad_type (tree type)
{
  const hashval_t hashcode = hash_pad_type (type);
  struct pad_type_hash in, *h, **slot;

  in.hash = hashcode;
  in.type = type;
  slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  h = *slot;
  if (!h)
    {
      h = ggc_alloc<pad_type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *slot = h;
    }

  return h->type;
}

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too; in this case,
   the padded type is canonicalized before being returned.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
                Entity_Id gnat_entity, bool is_component_type,
                bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
           || operand_equal_p (round_up (size, orig_align), orig_size, 0))
          && (align == 0 || align == orig_align))
        return type;

      if (!size)
        size = orig_size;
      if (align == 0)
        align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
          || (TREE_CODE (orig_size) == INTEGER_CST
              && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
                      !Comes_From_Source (gnat_entity),
                      !(TYPE_NAME (type)
                        && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
                        && DECL_IGNORED_P (TYPE_NAME (type))),
                      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info).  Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information.  */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
               size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
                           bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
          || (TREE_CODE (size) == INTEGER_CST
              && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
          && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
                             bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
         it by means of the hash table.  This is consistent with the language
         semantics and ensures that gigi and the middle-end have a common view
         of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
        {
          tree canonical = canonicalize_pad_type (record);
          if (canonical != record)
            {
              record = canonical;
              goto built;
            }
        }
    }

  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, maybe_debug_type (type));

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
           && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding
         since it will be referenced by debugging information attributes.
         At global level, we need a single variable across all translation
         units.  */
      if (size
          && TREE_CODE (size) != INTEGER_CST
          && (definition || global_bindings_p ()))
        {
          /* Whether or not gnat_entity comes from source, this XVZ variable
             is a compilation artifact.  */
          size_unit
            = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
                               size_unit, true, global_bindings_p (),
                               !definition && global_bindings_p (), false,
                               false, true, true, NULL, gnat_entity);
          TYPE_SIZE_UNIT (record) = size_unit;
        }

      /* There is no need to show what we are a subtype of when outputting as
         few encodings as possible: regular debugging information makes this
         redundant.  */
1457 if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
1458 {
1459 tree marker = make_node (RECORD_TYPE);
1460 tree orig_name = TYPE_IDENTIFIER (type);
1461
1462 TYPE_NAME (marker) = concat_name (name, "XVS");
1463 finish_record_type (marker,
1464 create_field_decl (orig_name,
1465 build_reference_type (type),
1466 marker, NULL_TREE, NULL_TREE,
1467 0, 0),
1468 0, true);
1469 TYPE_SIZE_UNIT (marker) = size_unit;
1470
1471 add_parallel_type (record, marker);
1472 }
1473 }
1474
1475 built:
1476 /* If a simple size was explicitly given, maybe issue a warning. */
1477 if (!size
1478 || TREE_CODE (size) == COND_EXPR
1479 || TREE_CODE (size) == MAX_EXPR
1480 || No (gnat_entity))
1481 return record;
1482
1483 /* But don't do it if we are just annotating types and the type is tagged or
1484 concurrent, since these types aren't fully laid out in this mode. */
1485 if (type_annotate_only)
1486 {
1487 Entity_Id gnat_type
1488 = is_component_type
1489 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1490
1491 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1492 return record;
1493 }
1494
1495 /* Take the original size as the maximum size of the input if there was an
1496 unconstrained record involved and round it up to the specified alignment,
1497 if one was specified, but only for aggregate types. */
1498 if (CONTAINS_PLACEHOLDER_P (orig_size))
1499 orig_size = max_size (orig_size, true);
1500
1501 if (align && AGGREGATE_TYPE_P (type))
1502 orig_size = round_up (orig_size, align);
1503
1504 if (!operand_equal_p (size, orig_size, 0)
1505 && !(TREE_CODE (size) == INTEGER_CST
1506 && TREE_CODE (orig_size) == INTEGER_CST
1507 && (TREE_OVERFLOW (size)
1508 || TREE_OVERFLOW (orig_size)
1509 || tree_int_cst_lt (size, orig_size))))
1510 {
1511 Node_Id gnat_error_node;
1512
1513 /* For a packed array, post the message on the original array type. */
1514 if (Is_Packed_Array_Impl_Type (gnat_entity))
1515 gnat_entity = Original_Array_Type (gnat_entity);
1516
1517 if ((Ekind (gnat_entity) == E_Component
1518 || Ekind (gnat_entity) == E_Discriminant)
1519 && Present (Component_Clause (gnat_entity)))
1520 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1521 else if (Has_Size_Clause (gnat_entity))
1522 gnat_error_node = Expression (Size_Clause (gnat_entity));
1523 else if (Has_Object_Size_Clause (gnat_entity))
1524 gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
1525 else
1526 gnat_error_node = Empty;
1527
1528 /* Generate message only for entities that come from source, since
1529 if we have an entity created by expansion, the message will be
1530 generated for some other corresponding source entity. */
1531 if (Comes_From_Source (gnat_entity))
1532 {
1533 if (Present (gnat_error_node))
1534 post_error_ne_tree ("{^ }bits of & unused?",
1535 gnat_error_node, gnat_entity,
1536 size_diffop (size, orig_size));
1537 else if (is_component_type)
1538 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1539 gnat_entity, gnat_entity,
1540 size_diffop (size, orig_size));
1541 }
1542 }
1543
1544 return record;
1545 }
1546
1547 /* Return true if padded TYPE was built with an RM size. */
1548
1549 bool
1550 pad_type_has_rm_size (tree type)
1551 {
1552 /* This is required for the lookup. */
1553 if (!TREE_CONSTANT (TYPE_SIZE (type)))
1554 return false;
1555
1556 const hashval_t hashcode = hash_pad_type (type);
1557 struct pad_type_hash in, *h;
1558
1559 in.hash = hashcode;
1560 in.type = type;
1561 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1562
1563 /* The types built with an RM size are the canonicalized ones. */
1564 return h && h->type == type;
1565 }
1566
1567 /* Return a copy of the padded TYPE but with reverse storage order. */
1568
1569 tree
1570 set_reverse_storage_order_on_pad_type (tree type)
1571 {
1572 if (flag_checking)
1573 {
1574 /* This only makes sense for a scalar inner type, hence the check. */
1575 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1576 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1577 && !VECTOR_TYPE_P (inner_type));
1578 }
1579
1580 /* This is required for the canonicalization. */
1581 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1582
1583 tree field = copy_node (TYPE_FIELDS (type));
1584 type = copy_type (type);
1585 DECL_CONTEXT (field) = type;
1586 TYPE_FIELDS (type) = field;
1587 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1588 return canonicalize_pad_type (type);
1589 }
1590 \f
1591 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1592 If this is a multi-dimensional array type, do this recursively.
1593
1594 OP may be
1595 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1596 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1597 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
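/* For illustration only: a hypothetical caller that derives a new array
   type from an existing one and wants both views to conflict could do

     relate_alias_sets (gnu_new_array_type, gnu_old_array_type,
                        ALIAS_SET_COPY);

   where the two type trees stand for the derived and original types.  */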
1598
1599 void
1600 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1601 {
1602 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1603 of a one-dimensional array, since the padding has the same alias set
1604 as the field type, but if it's a multi-dimensional array, we need to
1605 see the inner types. */
1606 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1607 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1608 || TYPE_PADDING_P (gnu_old_type)))
1609 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1610
1611 /* Unconstrained array types are deemed incomplete and would thus be given
1612 alias set 0. Retrieve the underlying array type. */
1613 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1614 gnu_old_type
1615 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1616 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1617 gnu_new_type
1618 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1619
1620 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1621 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1622 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1623 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1624
1625 switch (op)
1626 {
1627 case ALIAS_SET_COPY:
1628 /* The alias set shouldn't be copied between array types with different
1629 aliasing settings because this can break the aliasing relationship
1630 between the array type and its element type. */
1631 if (flag_checking || flag_strict_aliasing)
1632 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1633 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1634 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1635 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1636
1637 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1638 break;
1639
1640 case ALIAS_SET_SUBSET:
1641 case ALIAS_SET_SUPERSET:
1642 {
1643 alias_set_type old_set = get_alias_set (gnu_old_type);
1644 alias_set_type new_set = get_alias_set (gnu_new_type);
1645
1646 /* Do nothing if the alias sets conflict. This ensures that we
1647 never call record_alias_subset several times for the same pair
1648 or at all for alias set 0. */
1649 if (!alias_sets_conflict_p (old_set, new_set))
1650 {
1651 if (op == ALIAS_SET_SUBSET)
1652 record_alias_subset (old_set, new_set);
1653 else
1654 record_alias_subset (new_set, old_set);
1655 }
1656 }
1657 break;
1658
1659 default:
1660 gcc_unreachable ();
1661 }
1662
1663 record_component_aliases (gnu_new_type);
1664 }
1665 \f
1666 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1667 ARTIFICIAL_P is true if the type was generated by the compiler. */
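/* As an illustrative sketch, registering a hypothetical built-in type
   would look like

     record_builtin_type ("integer", gnu_integer_type, false);

   which pushes a TYPE_DECL at the current binding level and lets the
   debug back-end know about it.  */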
1668
1669 void
1670 record_builtin_type (const char *name, tree type, bool artificial_p)
1671 {
1672 tree type_decl = build_decl (input_location,
1673 TYPE_DECL, get_identifier (name), type);
1674 DECL_ARTIFICIAL (type_decl) = artificial_p;
1675 TYPE_ARTIFICIAL (type) = artificial_p;
1676 gnat_pushdecl (type_decl, Empty);
1677
1678 if (debug_hooks->type_decl)
1679 debug_hooks->type_decl (type_decl, false);
1680 }
1681 \f
1682 /* Finish constructing the character type CHAR_TYPE.
1683
1684 In Ada character types are enumeration types and, as a consequence, are
1685 represented in the front-end by integral types holding the positions of
1686 the enumeration values as defined by the language, which means that the
1687 integral types are unsigned.
1688
1689 Unfortunately the signedness of 'char' in C is implementation-defined
1690 and GCC even has the option -f[un]signed-char to toggle it at run time.
1691 Since GNAT's philosophy is to be compatible with C by default, to wit
1692 Interfaces.C.char is defined as a mere copy of Character, we may need
1693 to declare character types as signed types in GENERIC and generate the
1694 necessary adjustments to make them behave as unsigned types.
1695
1696 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1697 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1698 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1699 types. The idea is to ensure that the bit pattern contained in the
1700 Esize'd objects is not changed, even though the numerical value will
1701 be interpreted differently depending on the signedness. */
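/* For example, with a signed 'char', a hypothetical Character value at
   position 200 keeps the bit pattern 16#C8# in its 8-bit object; the
   signed GENERIC view reads that pattern as -56, while the unsigned
   debug type presents it as 200 again.  */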
1702
1703 void
1704 finish_character_type (tree char_type)
1705 {
1706 if (TYPE_UNSIGNED (char_type))
1707 return;
1708
1709 /* Make a copy of a generic unsigned version since we'll modify it. */
1710 tree unsigned_char_type
1711 = (char_type == char_type_node
1712 ? unsigned_char_type_node
1713 : copy_type (gnat_unsigned_type_for (char_type)));
1714
1715 /* Create an unsigned version of the type and set it as debug type. */
1716 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1717 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1718 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1719 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1720
1721 /* If this is a subtype, make the debug type a subtype of the debug type
1722 of the base type and convert literal RM bounds to unsigned. */
1723 if (TREE_TYPE (char_type))
1724 {
1725 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1726 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1727 tree max_value = TYPE_RM_MAX_VALUE (char_type);
1728
1729 if (TREE_CODE (min_value) == INTEGER_CST)
1730 min_value = fold_convert (base_unsigned_char_type, min_value);
1731 if (TREE_CODE (max_value) == INTEGER_CST)
1732 max_value = fold_convert (base_unsigned_char_type, max_value);
1733
1734 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1735 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1736 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1737 }
1738
1739 /* Adjust the RM bounds of the original type to unsigned; that's especially
1740 important for base types, whose RM bounds are implicit in this case. */
1741 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1742 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1743 }
1744
1745 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1746 finish constructing the record type as a fat pointer type. */
1747
1748 void
1749 finish_fat_pointer_type (tree record_type, tree field_list)
1750 {
1751 /* Make sure we can put it into a register. */
1752 if (STRICT_ALIGNMENT)
1753 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1754
1755 /* Show what it really is. */
1756 TYPE_FAT_POINTER_P (record_type) = 1;
1757
1758 /* Do not emit debug info for it since the types of its fields may still be
1759 incomplete at this point. */
1760 finish_record_type (record_type, field_list, 0, false);
1761
1762 /* Force type_contains_placeholder_p to return true on it. Although the
1763 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1764 type but the representation of the unconstrained array. */
1765 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1766 }
1767
1768 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1769 finish constructing the record or union type. If REP_LEVEL is zero, this
1770 record has no representation clause and so will be entirely laid out here.
1771 If REP_LEVEL is one, this record has a representation clause and has been
1772 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1773 this record is derived from a parent record and thus inherits its layout;
1774 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1775 additional debug info needs to be output for this type. */
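/* For instance, finish_fat_pointer_type above calls
   finish_record_type (record_type, field_list, 0, false) to lay out the
   fat pointer from scratch, without emitting debug info for it.  */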
1776
1777 void
1778 finish_record_type (tree record_type, tree field_list, int rep_level,
1779 bool debug_info_p)
1780 {
1781 enum tree_code code = TREE_CODE (record_type);
1782 tree name = TYPE_IDENTIFIER (record_type);
1783 tree ada_size = bitsize_zero_node;
1784 tree size = bitsize_zero_node;
1785 bool had_size = TYPE_SIZE (record_type) != 0;
1786 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1787 bool had_align = TYPE_ALIGN (record_type) != 0;
1788 tree field;
1789
1790 TYPE_FIELDS (record_type) = field_list;
1791
1792 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1793 generate debug info and have a parallel type. */
1794 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1795
1796 /* Globally initialize the record first. If this is a rep'ed record,
1797 that just means some initializations; otherwise, layout the record. */
1798 if (rep_level > 0)
1799 {
1800 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1801 TYPE_ALIGN (record_type)));
1802
1803 if (!had_size_unit)
1804 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1805
1806 if (!had_size)
1807 TYPE_SIZE (record_type) = bitsize_zero_node;
1808
1809 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1810 out just like a UNION_TYPE, since the size will be fixed. */
1811 else if (code == QUAL_UNION_TYPE)
1812 code = UNION_TYPE;
1813 }
1814 else
1815 {
1816 /* Ensure there isn't a size already set. There can be in an error
1817 case where there is a rep clause but all fields have errors and
1818 no longer have a position. */
1819 TYPE_SIZE (record_type) = 0;
1820
1821 /* Ensure we use the traditional GCC layout for bitfields when we need
1822 to pack the record type or have a representation clause. The other
1823 possible layout (Microsoft C compiler), if available, would prevent
1824 efficient packing in almost all cases. */
1825 #ifdef TARGET_MS_BITFIELD_LAYOUT
1826 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1827 decl_attributes (&record_type,
1828 tree_cons (get_identifier ("gcc_struct"),
1829 NULL_TREE, NULL_TREE),
1830 ATTR_FLAG_TYPE_IN_PLACE);
1831 #endif
1832
1833 layout_type (record_type);
1834 }
1835
1836 /* At this point, the position and size of each field is known. It was
1837 either set before entry by a rep clause, or by laying out the type above.
1838
1839 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1840 to compute the Ada size; the GCC size and alignment (for rep'ed records
1841 that are not padding types); and the mode (for rep'ed records). We also
1842 clear the DECL_BIT_FIELD indication for the cases we know have not been
1843 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1844
1845 if (code == QUAL_UNION_TYPE)
1846 field_list = nreverse (field_list);
1847
1848 for (field = field_list; field; field = DECL_CHAIN (field))
1849 {
1850 tree type = TREE_TYPE (field);
1851 tree pos = bit_position (field);
1852 tree this_size = DECL_SIZE (field);
1853 tree this_ada_size;
1854
1855 if (RECORD_OR_UNION_TYPE_P (type)
1856 && !TYPE_FAT_POINTER_P (type)
1857 && !TYPE_CONTAINS_TEMPLATE_P (type)
1858 && TYPE_ADA_SIZE (type))
1859 this_ada_size = TYPE_ADA_SIZE (type);
1860 else
1861 this_ada_size = this_size;
1862
1863 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1864 if (DECL_BIT_FIELD (field)
1865 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1866 {
1867 unsigned int align = TYPE_ALIGN (type);
1868
1869 /* In the general case, type alignment is required. */
1870 if (value_factor_p (pos, align))
1871 {
1872 /* The enclosing record type must be sufficiently aligned.
1873 Otherwise, if no alignment was specified for it and it
1874 has been laid out already, bump its alignment to the
1875 desired one if this is compatible with its size and
1876 maximum alignment, if any. */
1877 if (TYPE_ALIGN (record_type) >= align)
1878 {
1879 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1880 DECL_BIT_FIELD (field) = 0;
1881 }
1882 else if (!had_align
1883 && rep_level == 0
1884 && value_factor_p (TYPE_SIZE (record_type), align)
1885 && (!TYPE_MAX_ALIGN (record_type)
1886 || TYPE_MAX_ALIGN (record_type) >= align))
1887 {
1888 SET_TYPE_ALIGN (record_type, align);
1889 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1890 DECL_BIT_FIELD (field) = 0;
1891 }
1892 }
1893
1894 /* In the non-strict alignment case, only byte alignment is required. */
1895 if (!STRICT_ALIGNMENT
1896 && DECL_BIT_FIELD (field)
1897 && value_factor_p (pos, BITS_PER_UNIT))
1898 DECL_BIT_FIELD (field) = 0;
1899 }
1900
1901 /* Clear DECL_BIT_FIELD_TYPE for a variant part at offset 0, as it's simply
1902 not supported by the DECL_BIT_FIELD_REPRESENTATIVE machinery because
1903 the variant part is always the last field in the list. */
1904 if (DECL_INTERNAL_P (field)
1905 && TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
1906 && integer_zerop (pos))
1907 DECL_BIT_FIELD_TYPE (field) = NULL_TREE;
1908
1909 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1910 field is technically not addressable. Except that it can actually
1911 be addressed if it is BLKmode and happens to be properly aligned. */
1912 if (DECL_BIT_FIELD (field)
1913 && !(DECL_MODE (field) == BLKmode
1914 && value_factor_p (pos, BITS_PER_UNIT)))
1915 DECL_NONADDRESSABLE_P (field) = 1;
1916
1917 /* A type must be as aligned as its most aligned field that is not
1918 a bit-field. But this is already enforced by layout_type. */
1919 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1920 SET_TYPE_ALIGN (record_type,
1921 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1922
1923 switch (code)
1924 {
1925 case UNION_TYPE:
1926 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1927 size = size_binop (MAX_EXPR, size, this_size);
1928 break;
1929
1930 case QUAL_UNION_TYPE:
1931 ada_size
1932 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1933 this_ada_size, ada_size);
1934 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1935 this_size, size);
1936 break;
1937
1938 case RECORD_TYPE:
1939 /* Since we know here that all fields are sorted in order of
1940 increasing bit position, the size of the record is one
1941 higher than the ending bit of the last field processed
1942 unless we have a rep clause, since in that case we might
1943 have a field outside a QUAL_UNION_TYPE that has a higher ending
1944 position. So use a MAX in that case. Also, if this field is a
1945 QUAL_UNION_TYPE, we need to take into account the previous size in
1946 the case of empty variants. */
1947 ada_size
1948 = merge_sizes (ada_size, pos, this_ada_size,
1949 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1950 size
1951 = merge_sizes (size, pos, this_size,
1952 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1953 break;
1954
1955 default:
1956 gcc_unreachable ();
1957 }
1958 }
1959
1960 if (code == QUAL_UNION_TYPE)
1961 nreverse (field_list);
1962
1963 /* We need to set the regular sizes if REP_LEVEL is one. */
1964 if (rep_level == 1)
1965 {
1966 /* If this is a padding record, we never want to make the size smaller
1967 than what was specified in it, if any. */
1968 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1969 size = TYPE_SIZE (record_type);
1970
1971 tree size_unit = had_size_unit
1972 ? TYPE_SIZE_UNIT (record_type)
1973 : convert (sizetype,
1974 size_binop (CEIL_DIV_EXPR, size,
1975 bitsize_unit_node));
1976 const unsigned int align = TYPE_ALIGN (record_type);
1977
1978 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1979 TYPE_SIZE_UNIT (record_type)
1980 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1981 }
1982
1983 /* We need to set the Ada size if REP_LEVEL is zero or one. */
1984 if (rep_level < 2)
1985 {
1986 /* Now set any of the values we've just computed that apply. */
1987 if (!TYPE_FAT_POINTER_P (record_type)
1988 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1989 SET_TYPE_ADA_SIZE (record_type, ada_size);
1990 }
1991
1992 /* We need to set the mode if REP_LEVEL is one or two. */
1993 if (rep_level > 0)
1994 {
1995 compute_record_mode (record_type);
1996 finish_bitfield_layout (record_type);
1997 }
1998
1999 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
2000 TYPE_MAX_ALIGN (record_type) = 0;
2001
2002 if (debug_info_p)
2003 rest_of_record_type_compilation (record_type);
2004 }
2005
2006 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
2007 PARALLEL_TYPE has no context and its computation is not deferred yet, also
2008 propagate TYPE's context to PARALLEL_TYPE, or defer the propagation to the
2009 moment TYPE gets a context. */
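/* For instance, the padding machinery above attaches its XVS marker type
   with add_parallel_type (record, marker) so that the debugger can relate
   the marker to the padded record.  */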
2010
2011 void
2012 add_parallel_type (tree type, tree parallel_type)
2013 {
2014 tree decl = TYPE_STUB_DECL (type);
2015
2016 while (DECL_PARALLEL_TYPE (decl))
2017 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
2018
2019 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
2020
2021 /* If PARALLEL_TYPE already has a context, we are done. */
2022 if (TYPE_CONTEXT (parallel_type))
2023 return;
2024
2025 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
2026 it to PARALLEL_TYPE. */
2027 if (TYPE_CONTEXT (type))
2028 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2029
2030 /* Otherwise TYPE has no context yet. We know it will have one thanks to
2031 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2032 so we have nothing to do in this case. */
2033 }
2034
2035 /* Return true if TYPE has a parallel type. */
2036
2037 static bool
2038 has_parallel_type (tree type)
2039 {
2040 tree decl = TYPE_STUB_DECL (type);
2041
2042 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
2043 }
2044
2045 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
2046 associated with it. It need not be invoked directly in most cases as
2047 finish_record_type takes care of doing so. */
2048
2049 void
2050 rest_of_record_type_compilation (tree record_type)
2051 {
2052 bool var_size = false;
2053 tree field;
2054
2055 /* If this is a padded type, the bulk of the debug info has already been
2056 generated for the field's type. */
2057 if (TYPE_IS_PADDING_P (record_type))
2058 return;
2059
2060 /* If the type already has a parallel type (XVS type), then we're done. */
2061 if (has_parallel_type (record_type))
2062 return;
2063
2064 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2065 {
2066 /* We need to make an XVE/XVU record if any field has variable size,
2067 whether or not the record does. For example, if we have a union,
2068 it may be that all fields, rounded up to the alignment, have the
2069 same size, in which case we'll use that size. But the debug
2070 output routines (except Dwarf2) won't be able to output the fields,
2071 so we need to make the special record. */
2072 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2073 /* If a field has a non-constant qualifier, the record will have
2074 variable size too. */
2075 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2076 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2077 {
2078 var_size = true;
2079 break;
2080 }
2081 }
2082
2083 /* If this record type is of variable size, make a parallel record type that
2084 will tell the debugger how the former is laid out (see exp_dbug.ads). */
2085 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2086 {
2087 tree new_record_type
2088 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2089 ? UNION_TYPE : TREE_CODE (record_type));
2090 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2091 tree last_pos = bitsize_zero_node;
2092 tree old_field, prev_old_field = NULL_TREE;
2093
2094 new_name
2095 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2096 ? "XVU" : "XVE");
2097 TYPE_NAME (new_record_type) = new_name;
2098 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2099 TYPE_STUB_DECL (new_record_type)
2100 = create_type_stub_decl (new_name, new_record_type);
2101 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2102 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2103 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2104 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2105 TYPE_SIZE_UNIT (new_record_type)
2106 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2107
2108 /* Now scan all the fields, replacing each field with a new field
2109 corresponding to the new encoding. */
2110 for (old_field = TYPE_FIELDS (record_type); old_field;
2111 old_field = DECL_CHAIN (old_field))
2112 {
2113 tree field_type = TREE_TYPE (old_field);
2114 tree field_name = DECL_NAME (old_field);
2115 tree curpos = fold_bit_position (old_field);
2116 tree pos, new_field;
2117 bool var = false;
2118 unsigned int align = 0;
2119
2120 /* See how the position was modified from the last position.
2121
2122 There are two basic cases we support: a value was added
2123 to the last position, or the last position was rounded to
2124 a boundary and then something was added. Check for the
2125 first case first. If not, see if there is any evidence
2126 of rounding. If so, round the last position and retry.
2127
2128 If this is a union, the position can be taken as zero. */
2129 if (TREE_CODE (new_record_type) == UNION_TYPE)
2130 pos = bitsize_zero_node;
2131 else
2132 pos = compute_related_constant (curpos, last_pos);
2133
2134 if (!pos
2135 && TREE_CODE (curpos) == MULT_EXPR
2136 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2137 {
2138 tree offset = TREE_OPERAND (curpos, 0);
2139 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2140 align = scale_by_factor_of (offset, align);
2141 last_pos = round_up (last_pos, align);
2142 pos = compute_related_constant (curpos, last_pos);
2143 }
2144 else if (!pos
2145 && TREE_CODE (curpos) == PLUS_EXPR
2146 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2147 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2148 && tree_fits_uhwi_p
2149 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2150 {
2151 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2152 unsigned HOST_WIDE_INT addend
2153 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2154 align
2155 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2156 align = scale_by_factor_of (offset, align);
2157 align = MIN (align, addend & -addend);
2158 last_pos = round_up (last_pos, align);
2159 pos = compute_related_constant (curpos, last_pos);
2160 }
2161 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2162 {
2163 align = TYPE_ALIGN (field_type);
2164 last_pos = round_up (last_pos, align);
2165 pos = compute_related_constant (curpos, last_pos);
2166 }
2167
2168 /* If we can't compute a position, set it to zero.
2169
2170 ??? We really should abort here, but it's too much work
2171 to get this correct for all cases. */
2172 if (!pos)
2173 pos = bitsize_zero_node;
2174
2175 /* See if this type is variable-sized and make a pointer type
2176 and indicate the indirection if so. Beware that the debug
2177 back-end may adjust the position computed above according
2178 to the alignment of the field type, i.e. the pointer type
2179 in this case, if we don't preventively counter that. */
2180 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2181 {
2182 field_type = build_pointer_type (field_type);
2183 if (align != 0 && TYPE_ALIGN (field_type) > align)
2184 {
2185 field_type = copy_type (field_type);
2186 SET_TYPE_ALIGN (field_type, align);
2187 }
2188 var = true;
2189 }
2190
2191 /* Make a new field name, if necessary. */
2192 if (var || align != 0)
2193 {
2194 char suffix[16];
2195
2196 if (align != 0)
2197 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2198 align / BITS_PER_UNIT);
2199 else
2200 strcpy (suffix, "XVL");
2201
2202 field_name = concat_name (field_name, suffix);
2203 }
2204
2205 new_field
2206 = create_field_decl (field_name, field_type, new_record_type,
2207 DECL_SIZE (old_field), pos, 0, 0);
2208 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2209 TYPE_FIELDS (new_record_type) = new_field;
2210
2211 /* If the type of old_field is a QUAL_UNION_TYPE, take its size as being
2212 zero. The only time it's not the last field of the record
2213 is when there are other components at fixed positions after
2214 it (meaning there was a rep clause for every field) and we
2215 want to be able to encode them. */
2216 last_pos = size_binop (PLUS_EXPR, curpos,
2217 (TREE_CODE (TREE_TYPE (old_field))
2218 == QUAL_UNION_TYPE)
2219 ? bitsize_zero_node
2220 : DECL_SIZE (old_field));
2221 prev_old_field = old_field;
2222 }
2223
2224 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2225
2226 add_parallel_type (record_type, new_record_type);
2227 }
2228 }
2229
2230 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2231 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2232 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2233 replace a value of zero with the old size. If HAS_REP is true, we take the
2234 MAX of the end position of this field with LAST_SIZE. In all other cases,
2235 we use FIRST_BIT plus SIZE. Return an expression for the size. */
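/* Illustration with hypothetical constant inputs: for LAST_SIZE 32,
   FIRST_BIT 32 and SIZE 16, the result is 48 bits; if HAS_REP is true
   and LAST_SIZE were 64 instead, the MAX would keep 64.  */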
2236
2237 static tree
2238 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2239 bool has_rep)
2240 {
2241 tree type = TREE_TYPE (last_size);
2242 tree new_size;
2243
2244 if (!special || TREE_CODE (size) != COND_EXPR)
2245 {
2246 new_size = size_binop (PLUS_EXPR, first_bit, size);
2247 if (has_rep)
2248 new_size = size_binop (MAX_EXPR, last_size, new_size);
2249 }
2250
2251 else
2252 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2253 integer_zerop (TREE_OPERAND (size, 1))
2254 ? last_size : merge_sizes (last_size, first_bit,
2255 TREE_OPERAND (size, 1),
2256 1, has_rep),
2257 integer_zerop (TREE_OPERAND (size, 2))
2258 ? last_size : merge_sizes (last_size, first_bit,
2259 TREE_OPERAND (size, 2),
2260 1, has_rep));
2261
2262 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2263 when fed through SUBSTITUTE_IN_EXPR) into thinking that a constant
2264 size is not constant. */
2265 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2266 new_size = TREE_OPERAND (new_size, 0);
2267
2268 return new_size;
2269 }
2270
2271 /* Return the bit position of FIELD, in bits from the start of the record,
2272 and fold it as much as possible. This is a tree of type bitsizetype. */
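/* For example, a field whose DECL_FIELD_OFFSET is 4 bytes and whose
   DECL_FIELD_BIT_OFFSET is 3 yields 4 * BITS_PER_UNIT + 3 bits, i.e. 35
   on a typical byte-addressable target (hypothetical values).  */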
2273
2274 static tree
2275 fold_bit_position (const_tree field)
2276 {
2277 tree offset = DECL_FIELD_OFFSET (field);
2278 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2279 offset = size_binop (TREE_CODE (offset),
2280 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2281 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2282 else
2283 offset = fold_convert (bitsizetype, offset);
2284 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2285 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2286 }
2287
2288 /* Utility function of above to see if OP0 and OP1, both of bitsizetype, are
2289 related by the addition of a constant. Return that constant if so. */
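/* For example, for the hypothetical operands X + 24 and X + 8 the result
   is 16, whereas operands with different variable parts yield NULL_TREE.  */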
2290
2291 static tree
2292 compute_related_constant (tree op0, tree op1)
2293 {
2294 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2295
2296 if (TREE_CODE (op0) == MULT_EXPR
2297 && TREE_CODE (op1) == MULT_EXPR
2298 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2299 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2300 {
2301 factor = TREE_OPERAND (op0, 1);
2302 op0 = TREE_OPERAND (op0, 0);
2303 op1 = TREE_OPERAND (op1, 0);
2304 }
2305 else
2306 factor = NULL_TREE;
2307
2308 op0_cst = split_plus (op0, &op0_var);
2309 op1_cst = split_plus (op1, &op1_var);
2310 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2311
2312 if (operand_equal_p (op0_var, op1_var, 0))
2313 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2314
2315 return NULL_TREE;
2316 }
2317
2318 /* Utility function of above to split a tree IN, which may be a sum, into a
2319 constant part, which is returned, and a variable part, which is stored
2320 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2321 bitsizetype. */
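/* For example, splitting the hypothetical sum X + 4 returns 4 and sets
   *PVAR to X; splitting a tree with no constant part returns
   bitsize_zero_node.  */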
2322
2323 static tree
2324 split_plus (tree in, tree *pvar)
2325 {
2326 /* Strip conversions in order to ease the tree traversal and maximize the
2327 potential for constant or plus/minus discovery. We need to be careful
2328 to always return and set *pvar to bitsizetype trees, but it's worth
2329 the effort. */
2330 in = remove_conversions (in, false);
2331
2332 *pvar = convert (bitsizetype, in);
2333
2334 if (TREE_CODE (in) == INTEGER_CST)
2335 {
2336 *pvar = bitsize_zero_node;
2337 return convert (bitsizetype, in);
2338 }
2339 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2340 {
2341 tree lhs_var, rhs_var;
2342 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2343 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2344
2345 if (lhs_var == TREE_OPERAND (in, 0)
2346 && rhs_var == TREE_OPERAND (in, 1))
2347 return bitsize_zero_node;
2348
2349 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2350 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2351 }
2352 else
2353 return bitsize_zero_node;
2354 }
2355 \f
2356 /* Return a copy of TYPE but safe to modify in any way. */
2357
2358 tree
2359 copy_type (tree type)
2360 {
2361 tree new_type = copy_node (type);
2362
2363 /* Unshare the language-specific data. */
2364 if (TYPE_LANG_SPECIFIC (type))
2365 {
2366 TYPE_LANG_SPECIFIC (new_type) = NULL;
2367 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2368 }
2369
2370 /* And the contents of the language-specific slot if needed. */
2371 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2372 && TYPE_RM_VALUES (type))
2373 {
2374 TYPE_RM_VALUES (new_type) = NULL_TREE;
2375 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2376 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2377 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2378 }
2379
2380 /* copy_node clears this field instead of copying it, because it is
2381 aliased with TREE_CHAIN. */
2382 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2383
2384 TYPE_POINTER_TO (new_type) = NULL_TREE;
2385 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2386 TYPE_MAIN_VARIANT (new_type) = new_type;
2387 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2388 TYPE_CANONICAL (new_type) = new_type;
2389
2390 return new_type;
2391 }
2392 \f
2393 /* Return a subtype of sizetype with range MIN to MAX and whose
2394 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2395 of the associated TYPE_DECL. */
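/* A hypothetical use for an array indexed from 1 to N would be

     create_index_type (size_one_node, gnu_n, gnu_base_index, gnat_node);

   where gnu_n and gnu_base_index stand for the upper bound and the index
   type trees.  */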
2396
2397 tree
2398 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2399 {
2400 /* First build a type for the desired range. */
2401 tree type = build_nonshared_range_type (sizetype, min, max);
2402
2403 /* Then set the index type. */
2404 SET_TYPE_INDEX_TYPE (type, index);
2405 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2406
2407 return type;
2408 }
2409
2410 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2411 sizetype is used. */
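/* For instance, the hypothetical call

     create_range_type (NULL_TREE, size_int (1), size_int (10))

   returns a subtype of sizetype whose base range is that of sizetype and
   whose RM bounds are set to [1, 10].  */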
2412
2413 tree
2414 create_range_type (tree type, tree min, tree max)
2415 {
2416 tree range_type;
2417
2418 if (!type)
2419 type = sizetype;
2420
2421 /* First build a type with the base range. */
2422 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2423 TYPE_MAX_VALUE (type));
2424
2425 /* Then set the actual range. */
2426 SET_TYPE_RM_MIN_VALUE (range_type, min);
2427 SET_TYPE_RM_MAX_VALUE (range_type, max);
2428
2429 return range_type;
2430 }
2431 \f
2432 /* Return an extra subtype of TYPE with range MIN to MAX. */
2433
2434 tree
2435 create_extra_subtype (tree type, tree min, tree max)
2436 {
2437 const bool uns = TYPE_UNSIGNED (type);
2438 const unsigned prec = TYPE_PRECISION (type);
2439 tree subtype = uns ? make_unsigned_type (prec) : make_signed_type (prec);
2440
2441 TREE_TYPE (subtype) = type;
2442 TYPE_EXTRA_SUBTYPE_P (subtype) = 1;
2443
2444 SET_TYPE_RM_MIN_VALUE (subtype, min);
2445 SET_TYPE_RM_MAX_VALUE (subtype, max);
2446
2447 return subtype;
2448 }
2449 \f
2450 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2451 NAME gives the name of the type to be used in the declaration. */
2452
2453 tree
2454 create_type_stub_decl (tree name, tree type)
2455 {
2456 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2457 DECL_ARTIFICIAL (type_decl) = 1;
2458 TYPE_ARTIFICIAL (type) = 1;
2459 return type_decl;
2460 }
2461
2462 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2463 used in the declaration. ARTIFICIAL_P is true if the declaration was
2464 generated by the compiler. DEBUG_INFO_P is true if we need to write
2465 debug information about this type. GNAT_NODE is used for the position
2466 of the decl. */
2467
2468 tree
2469 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2470 Node_Id gnat_node)
2471 {
2472 enum tree_code code = TREE_CODE (type);
2473 bool is_named
2474 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2475 tree type_decl;
2476
2477 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2478 gcc_assert (!TYPE_IS_DUMMY_P (type));
2479
2480 /* If the type hasn't been named yet, we're naming it; preserve an existing
2481 TYPE_STUB_DECL that has been attached to it for some purpose. */
2482 if (!is_named && TYPE_STUB_DECL (type))
2483 {
2484 type_decl = TYPE_STUB_DECL (type);
2485 DECL_NAME (type_decl) = name;
2486 }
2487 else
2488 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2489
2490 DECL_ARTIFICIAL (type_decl) = artificial_p;
2491 TYPE_ARTIFICIAL (type) = artificial_p;
2492
2493 /* Add this decl to the current binding level. */
2494 gnat_pushdecl (type_decl, gnat_node);
2495
2496 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2497 causes the name to be also viewed as a "tag" by the debug back-end, with
2498 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2499 types in DWARF.
2500
2501 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2502 from multiple contexts, and "type_decl" references a copy of it: in such a
2503 case, do not mess with TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2504 with the mechanism above. */
2505 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2506 TYPE_STUB_DECL (type) = type_decl;
2507
2508 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE, which the
2509 back-end doesn't support, and for others if we don't need to. */
2510 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2511 DECL_IGNORED_P (type_decl) = 1;
2512
2513 return type_decl;
2514 }
2515 \f
2516 /* Return a VAR_DECL or CONST_DECL node.
2517
2518 NAME gives the name of the variable. ASM_NAME is its assembler name
2519 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2520 the GCC tree for an optional initial expression; NULL_TREE if none.
2521
2522 CONST_FLAG is true if this variable is constant, in which case we might
2523 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2524
2525 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2526 definition to be made visible outside of the current compilation unit, for
2527 instance variable definitions in a package specification.
2528
2529 EXTERN_FLAG is true when processing an external variable declaration (as
2530 opposed to a definition: no storage is to be allocated for the variable).
2531
2532 STATIC_FLAG is only relevant when not at top level and indicates whether
2533 to always allocate storage to the variable.
2534
2535 VOLATILE_FLAG is true if this variable is declared as volatile.
2536
2537 ARTIFICIAL_P is true if the variable was generated by the compiler.
2538
2539 DEBUG_INFO_P is true if we need to write debug information for it.
2540
2541 ATTR_LIST is the list of attributes to be attached to the variable.
2542
2543 GNAT_NODE is used for the position of the decl. */
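/* As an illustrative sketch only, a hypothetical artificial library-level
   counter could be created with

     create_var_decl (get_identifier ("counter"), NULL_TREE,
                      integer_type_node, integer_zero_node,
                      false, true, false, true, false, true, false,
                      NULL, gnat_node);

   i.e. a public, static, non-constant, compiler-generated variable with
   no debug info and no attributes.  */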
2544
2545 tree
2546 create_var_decl (tree name, tree asm_name, tree type, tree init,
2547 bool const_flag, bool public_flag, bool extern_flag,
2548 bool static_flag, bool volatile_flag, bool artificial_p,
2549 bool debug_info_p, struct attrib *attr_list,
2550 Node_Id gnat_node, bool const_decl_allowed_p)
2551 {
2552 /* Whether the object has static storage duration, either explicitly or by
2553 virtue of being declared at the global level. */
2554 const bool static_storage = static_flag || global_bindings_p ();
2555
2556 /* Whether the initializer is constant: for an external object or an object
2557 with static storage duration, we check that the initializer is a valid
2558 constant expression for initializing a static variable; otherwise, we
2559 only check that it is constant. */
2560 const bool init_const
2561 = (init
2562 && gnat_types_compatible_p (type, TREE_TYPE (init))
2563 && (extern_flag || static_storage
2564 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2565 != NULL_TREE
2566 : TREE_CONSTANT (init)));
2567
2568 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2569 case the initializer may be used in lieu of the DECL node (as done in
2570 Identifier_to_gnu). This is useful to prevent the need of elaboration
2571 code when an identifier for which such a DECL is made is in turn used
2572 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2573 but extra constraints apply to this choice (see below) and they are not
2574 relevant to the distinction we wish to make. */
2575 const bool constant_p = const_flag && init_const;
2576
2577 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2578 and may be used for scalars in general but not for aggregates. */
2579 tree var_decl
2580 = build_decl (input_location,
2581 (constant_p
2582 && const_decl_allowed_p
2583 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2584 name, type);
2585
2586 /* Detect constants created by the front-end to hold 'Reference to function
2587 calls for stabilization purposes. This is needed for renaming. */
2588 if (const_flag && init && POINTER_TYPE_P (type))
2589 {
2590 tree inner = init;
2591 if (TREE_CODE (inner) == COMPOUND_EXPR)
2592 inner = TREE_OPERAND (inner, 1);
2593 inner = remove_conversions (inner, true);
2594 if (TREE_CODE (inner) == ADDR_EXPR
2595 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2596 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2597 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2598 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2599 DECL_RETURN_VALUE_P (var_decl) = 1;
2600 }
2601
2602 /* If this is external, throw away any initializations (they will be done
2603 elsewhere) unless this is a constant for which we would like to remain
2604 able to get the initializer. If we are defining a global here, leave a
2605 constant initialization and save any variable elaborations for the
2606 elaboration routine. If we are just annotating types, throw away the
2607 initialization if it isn't a constant. */
2608 if ((extern_flag && !constant_p)
2609 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2610 init = NULL_TREE;
2611
2612 /* At the global level, a non-constant initializer generates elaboration
2613 statements. Check that such statements are allowed, that is to say,
2614 not violating a No_Elaboration_Code restriction. */
2615 if (init && !init_const && global_bindings_p ())
2616 Check_Elaboration_Code_Allowed (gnat_node);
2617
2618 /* Attach the initializer, if any. */
2619 DECL_INITIAL (var_decl) = init;
2620
2621 /* Directly set some flags. */
2622 DECL_ARTIFICIAL (var_decl) = artificial_p;
2623 DECL_EXTERNAL (var_decl) = extern_flag;
2624
2625 TREE_CONSTANT (var_decl) = constant_p;
2626 TREE_READONLY (var_decl) = const_flag;
2627
2628 /* The object is public if it is external or if it is declared public
2629 and has static storage duration. */
2630 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2631
2632 /* We need to allocate static storage for an object with static storage
2633 duration if it isn't external. */
2634 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2635
2636 TREE_SIDE_EFFECTS (var_decl)
2637 = TREE_THIS_VOLATILE (var_decl)
2638 = TYPE_VOLATILE (type) | volatile_flag;
2639
2640 if (TREE_SIDE_EFFECTS (var_decl))
2641 TREE_ADDRESSABLE (var_decl) = 1;
2642
2643 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2644 try to fiddle with DECL_COMMON. However, on platforms that don't
2645 support global BSS sections, uninitialized global variables would
2646 go in DATA instead, thus increasing the size of the executable. */
2647 if (!flag_no_common
2648 && TREE_CODE (var_decl) == VAR_DECL
2649 && TREE_PUBLIC (var_decl)
2650 && !have_global_bss_p ())
2651 DECL_COMMON (var_decl) = 1;
2652
2653 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2654 since we will create an associated variable. Likewise for an external
2655 constant whose initializer is not absolute, because this would mean a
2656 global relocation in a read-only section which runs afoul of the PE-COFF
2657 run-time relocation mechanism. */
2658 if (!debug_info_p
2659 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2660 || (extern_flag
2661 && constant_p
2662 && init
2663 && initializer_constant_valid_p (init, TREE_TYPE (init))
2664 != null_pointer_node))
2665 DECL_IGNORED_P (var_decl) = 1;
2666
2667 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2668 if (TREE_CODE (var_decl) == VAR_DECL)
2669 process_attributes (&var_decl, &attr_list, true, gnat_node);
2670
2671 /* Add this decl to the current binding level. */
2672 gnat_pushdecl (var_decl, gnat_node);
2673
2674 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2675 {
2676 /* Let the target mangle the name if this isn't a verbatim asm. */
2677 if (*IDENTIFIER_POINTER (asm_name) != '*')
2678 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2679
2680 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2681 }
2682
2683 return var_decl;
2684 }
2685 \f
2686 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
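/* For example, this returns true for an array type itself and for a record
   with a (possibly nested) field of array type, but false for a record
   made only of scalar fields.  */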
2687
2688 static bool
2689 aggregate_type_contains_array_p (tree type)
2690 {
2691 switch (TREE_CODE (type))
2692 {
2693 case RECORD_TYPE:
2694 case UNION_TYPE:
2695 case QUAL_UNION_TYPE:
2696 {
2697 tree field;
2698 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2699 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2700 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2701 return true;
2702 return false;
2703 }
2704
2705 case ARRAY_TYPE:
2706 return true;
2707
2708 default:
2709 gcc_unreachable ();
2710 }
2711 }
2712
2713 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2714 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2715 is the specified size of the field. If POS is nonzero, it is the bit
2716 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2717 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2718 means we are allowed to take the address of the field; if it is negative,
2719 we should not make a bitfield, which is used by make_aligning_type. */
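/* For instance, a hypothetical 8-bit field at bit position 24 of a packed
   record could be created with

     create_field_decl (get_identifier ("f"), gnu_type, gnu_record_type,
                        bitsize_int (8), bitsize_int (24), 1, 0);

   following the parameters documented above.  */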
2720
2721 tree
2722 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2723 int packed, int addressable)
2724 {
2725 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2726
2727 DECL_CONTEXT (field_decl) = record_type;
2728 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2729
2730 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2731 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2732 Likewise for an aggregate without specified position that contains an
2733 array, because in this case slices of variable length of this array
2734 must be handled by GCC and variable-sized objects need to be aligned
2735 to at least a byte boundary. */
2736 if (packed && (TYPE_MODE (type) == BLKmode
2737 || (!pos
2738 && AGGREGATE_TYPE_P (type)
2739 && aggregate_type_contains_array_p (type))))
2740 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2741
2742 /* If a size is specified, use it. Otherwise, if the record type is packed,
2743 compute a size to use, which may differ from the object's natural size.
2744 We always set a size in this case to trigger the checks for bitfield
2745 creation below, which is typically required when no position has been
2746 specified. */
2747 if (size)
2748 size = convert (bitsizetype, size);
2749 else if (packed == 1)
2750 {
2751 size = rm_size (type);
2752 if (TYPE_MODE (type) == BLKmode)
2753 size = round_up (size, BITS_PER_UNIT);
2754 }
2755
2756 /* Provided ADDRESSABLE allows it, we make a bitfield when a size is
2757 specified, for two reasons: first, if the size differs from the natural
2758 size; second, if the alignment is insufficient. There are a number of
2759 ways the latter can be true.
2760
2761 We never make a bitfield if the type of the field has a nonconstant size,
2762 because no such entity requiring bitfield operations should reach here.
2763
2764 We do *preventively* make a bitfield when there might be the need for it
2765 but we don't have all the necessary information to decide, as is the case
2766 of a field in a packed record.
2767
2768 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2769 in layout_decl or finish_record_type to clear the bit_field indication if
2770 it is in fact not needed. */
2771 if (addressable >= 0
2772 && size
2773 && TREE_CODE (size) == INTEGER_CST
2774 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2775 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2776 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2777 || packed
2778 || (TYPE_ALIGN (record_type) != 0
2779 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2780 {
2781 DECL_BIT_FIELD (field_decl) = 1;
2782 DECL_SIZE (field_decl) = size;
2783 if (!packed && !pos)
2784 {
2785 if (TYPE_ALIGN (record_type) != 0
2786 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2787 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2788 else
2789 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2790 }
2791 }
2792
2793 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2794
2795 /* Bump the alignment if need be, either for bitfield/packing purposes or
2796 to satisfy the type requirements if no such consideration applies. When
2797 we get the alignment from the type, indicate if this is from an explicit
2798 user request, which prevents stor-layout from lowering it later on. */
2799 {
2800 unsigned int bit_align
2801 = (DECL_BIT_FIELD (field_decl) ? 1
2802 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2803
2804 if (bit_align > DECL_ALIGN (field_decl))
2805 SET_DECL_ALIGN (field_decl, bit_align);
2806 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2807 {
2808 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2809 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2810 }
2811 }
2812
2813 if (pos)
2814 {
2815 /* We need to pass in the alignment the DECL is known to have.
2816 This is the lowest-order bit set in POS, but no more than
2817 the alignment of the record, if one is specified. Note
2818 that an alignment of 0 is taken as infinite. */
2819 unsigned int known_align;
2820
2821 if (tree_fits_uhwi_p (pos))
2822 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2823 else
2824 known_align = BITS_PER_UNIT;
2825
2826 if (TYPE_ALIGN (record_type)
2827 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2828 known_align = TYPE_ALIGN (record_type);
2829
2830 layout_decl (field_decl, known_align);
2831 SET_DECL_OFFSET_ALIGN (field_decl,
2832 tree_fits_uhwi_p (pos)
2833 ? BIGGEST_ALIGNMENT : BITS_PER_UNIT);
2834 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2835 &DECL_FIELD_BIT_OFFSET (field_decl),
2836 DECL_OFFSET_ALIGN (field_decl), pos);
2837 }
2838
2839 /* In addition to what our caller says, claim the field is addressable if we
2840 know that its type is not suitable.
2841
2842 The field may also be "technically" nonaddressable, meaning that even if
2843 we attempt to take the field's address we will actually get the address
2844 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2845 value we have at this point is not accurate enough, so we don't account
2846 for this here and let finish_record_type decide. */
2847 if (!addressable && !type_for_nonaliased_component_p (type))
2848 addressable = 1;
2849
2850 /* Note that there is a trade-off in making a field nonaddressable because
2851 this will cause type-based alias analysis to use the same alias set for
2852 accesses to the field as for accesses to the whole record: while doing
2853 so will make it more likely to disambiguate accesses to other objects
2854 and accesses to the field, it will make it less likely to disambiguate
2855 accesses to the other fields of the record and accesses to the field.
2856 If the record is fully static, then the trade-off is irrelevant since
2857 the fields of the record can always be disambiguated by their offsets
2858 but, if the record is dynamic, then it can become problematic. */
2859 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2860
2861 return field_decl;
2862 }
2863 \f
2864 /* Return a PARM_DECL node with NAME and TYPE. */
2865
2866 tree
2867 create_param_decl (tree name, tree type)
2868 {
2869 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2870
2871 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2872 can lead to various ABI violations. */
2873 if (targetm.calls.promote_prototypes (NULL_TREE)
2874 && INTEGRAL_TYPE_P (type)
2875 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2876 {
2877 /* We have to be careful about biased types here. Make a subtype
2878 of integer_type_node with the proper biasing. */
2879 if (TREE_CODE (type) == INTEGER_TYPE
2880 && TYPE_BIASED_REPRESENTATION_P (type))
2881 {
2882 tree subtype
2883 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2884 TREE_TYPE (subtype) = integer_type_node;
2885 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2886 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2887 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2888 type = subtype;
2889 }
2890 else
2891 type = integer_type_node;
2892 }
2893
2894 DECL_ARG_TYPE (param_decl) = type;
2895 return param_decl;
2896 }
2897 \f
2898 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2899 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2900 changed. GNAT_NODE is used for the position of error messages. */
2901
2902 void
2903 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2904 Node_Id gnat_node)
2905 {
2906 struct attrib *attr;
2907
2908 for (attr = *attr_list; attr; attr = attr->next)
2909 switch (attr->type)
2910 {
2911 case ATTR_MACHINE_ATTRIBUTE:
2912 Sloc_to_locus (Sloc (gnat_node), &input_location);
2913 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2914 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2915 break;
2916
2917 case ATTR_LINK_ALIAS:
2918 if (!DECL_EXTERNAL (*node))
2919 {
2920 TREE_STATIC (*node) = 1;
2921 assemble_alias (*node, attr->name);
2922 }
2923 break;
2924
2925 case ATTR_WEAK_EXTERNAL:
2926 if (SUPPORTS_WEAK)
2927 declare_weak (*node);
2928 else
2929 post_error ("?weak declarations not supported on this target",
2930 attr->error_point);
2931 break;
2932
2933 case ATTR_LINK_SECTION:
2934 if (targetm_common.have_named_sections)
2935 {
2936 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2937 DECL_COMMON (*node) = 0;
2938 }
2939 else
2940 post_error ("?section attributes are not supported for this target",
2941 attr->error_point);
2942 break;
2943
2944 case ATTR_LINK_CONSTRUCTOR:
2945 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2946 TREE_USED (*node) = 1;
2947 break;
2948
2949 case ATTR_LINK_DESTRUCTOR:
2950 DECL_STATIC_DESTRUCTOR (*node) = 1;
2951 TREE_USED (*node) = 1;
2952 break;
2953
2954 case ATTR_THREAD_LOCAL_STORAGE:
2955 set_decl_tls_model (*node, decl_default_tls_model (*node));
2956 DECL_COMMON (*node) = 0;
2957 break;
2958 }
2959
2960 *attr_list = NULL;
2961 }
2962
2963 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2964 a power of 2. */
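/* For example, this returns true for a constant 24 with FACTOR 8, and for
   the hypothetical product X * 16 with FACTOR 8, since one operand of the
   MULT_EXPR is itself a multiple of 8.  */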
2965
2966 bool
2967 value_factor_p (tree value, HOST_WIDE_INT factor)
2968 {
2969 if (tree_fits_uhwi_p (value))
2970 return tree_to_uhwi (value) % factor == 0;
2971
2972 if (TREE_CODE (value) == MULT_EXPR)
2973 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2974 || value_factor_p (TREE_OPERAND (value, 1), factor));
2975
2976 return false;
2977 }
2978
2979 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling it
2980 to be filled in by the elaboration of GNAT_SCOPE. */
2981
2982 static struct deferred_decl_context_node *
2983 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2984 {
2985 struct deferred_decl_context_node *new_node;
2986
2987 new_node
2988 = (struct deferred_decl_context_node *) xmalloc (sizeof (*new_node));
2989 new_node->decl = decl;
2990 new_node->gnat_scope = gnat_scope;
2991 new_node->force_global = force_global;
2992 new_node->types.create (1);
2993 new_node->next = deferred_decl_context_queue;
2994 deferred_decl_context_queue = new_node;
2995 return new_node;
2996 }
2997
2998 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling it
2999 to be filled in with the DECL_CONTEXT computed as part of N as soon as
3000 the latter is available. */
3001
3002 static void
3003 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
3004 {
3005 n->types.safe_push (type);
3006 }
3007
3008 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
3009 NULL_TREE if it is not available. */
3010
3011 static tree
3012 compute_deferred_decl_context (Entity_Id gnat_scope)
3013 {
3014 tree context;
3015
3016 if (present_gnu_tree (gnat_scope))
3017 context = get_gnu_tree (gnat_scope);
3018 else
3019 return NULL_TREE;
3020
3021 if (TREE_CODE (context) == TYPE_DECL)
3022 {
3023 const tree context_type = TREE_TYPE (context);
3024
3025 /* Skip dummy types: only the final ones can appear in the context
3026 chain. */
3027 if (TYPE_DUMMY_P (context_type))
3028 return NULL_TREE;
3029
3030 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
3031 chain. */
3032 else
3033 context = context_type;
3034 }
3035
3036 return context;
3037 }
3038
3039 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
3040 that cannot be processed yet and remove the other ones. If FORCE is true,
3041 force the processing of all nodes and use the global context when nodes
3042 don't have a GNU translation. */
3043
3044 void
3045 process_deferred_decl_context (bool force)
3046 {
3047 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
3048 struct deferred_decl_context_node *node;
3049
3050 while (*it)
3051 {
3052 bool processed = false;
3053 tree context = NULL_TREE;
3054 Entity_Id gnat_scope;
3055
3056 node = *it;
3057
3058 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3059 get the first scope. */
3060 gnat_scope = node->gnat_scope;
3061 while (Present (gnat_scope))
3062 {
3063 context = compute_deferred_decl_context (gnat_scope);
3064 if (!force || context)
3065 break;
3066 gnat_scope = get_debug_scope (gnat_scope, NULL);
3067 }
3068
3069 /* Imported declarations must not be in a local context (i.e. not inside
3070 a function). */
3071 if (context && node->force_global > 0)
3072 {
3073 tree ctx = context;
3074
3075 while (ctx)
3076 {
3077 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3078 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3079 }
3080 }
3081
3082 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3083 was no elaborated scope, use the global context. */
3084 if (force && !context)
3085 context = get_global_context ();
3086
3087 if (context)
3088 {
3089 tree t;
3090 int i;
3091
3092 DECL_CONTEXT (node->decl) = context;
3093
3094 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3095 ..._TYPE nodes. */
3096 FOR_EACH_VEC_ELT (node->types, i, t)
3097 {
3098 gnat_set_type_context (t, context);
3099 }
3100 processed = true;
3101 }
3102
3103 /* If this node has been successfully processed, remove it from the
3104 queue. Then move to the next node. */
3105 if (processed)
3106 {
3107 *it = node->next;
3108 node->types.release ();
3109 free (node);
3110 }
3111 else
3112 it = &node->next;
3113 }
3114 }
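
/* Timeline sketch for the deferral machinery: a decl deferred against a
   scope S stays in the queue while S has no GNU tree; once S has been
   elaborated, the next call installs S (or its ..._TYPE) as DECL_CONTEXT
   and propagates it to the recorded types; a final call with FORCE set
   flushes any survivor to the global context.  */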
3115
3116 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3117
3118 static unsigned int
3119 scale_by_factor_of (tree expr, unsigned int value)
3120 {
3121 unsigned HOST_WIDE_INT addend = 0;
3122 unsigned HOST_WIDE_INT factor = 1;
3123
3124 /* Peel conversions around EXPR and try to extract bodies from function
3125 calls: it is possible to get the scale factor from size functions. */
3126 expr = remove_conversions (expr, true);
3127 if (TREE_CODE (expr) == CALL_EXPR)
3128 expr = maybe_inline_call_in_expr (expr);
3129
3130 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3131 multiple of the scale factor we are looking for. */
3132 if (TREE_CODE (expr) == PLUS_EXPR
3133 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3134 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3135 {
3136 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3137 expr = TREE_OPERAND (expr, 0);
3138 }
3139
3140 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3141 corresponding to the number of trailing zeros of the mask. */
3142 if (TREE_CODE (expr) == BIT_AND_EXPR
3143 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3144 {
3145 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3146 unsigned int i = 0;
3147
3148 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3149 {
3150 mask >>= 1;
3151 factor *= 2;
3152 i++;
3153 }
3154 }
3155
3156 /* If the addend is not a multiple of the factor we found, give up. In
3157 theory we could find a smaller common factor but it's useless for our
3158 needs. This situation arises when dealing with a field F1 with no
3159 alignment requirement that follows a field F2 with such
3160 requirements. As long as we have F2's offset, we don't need alignment
3161 information to compute F1's. */
3162 if (addend % factor != 0)
3163 factor = 1;
3164
3165 return factor * value;
3166 }
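
/* A worked example (hypothetical trees): for EXPR of the form (X & -8),
   a BIT_AND_EXPR whose mask has 3 trailing zero bits, the factor is 8
   and scale_by_factor_of (expr, 4) yields 32.  For ((X & -8) + 4), the
   addend 4 is not a multiple of 8, so the factor degrades to 1 and the
   result is just 4.  */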
3167
3168 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3169 unless we can prove these 2 fields are laid out in such a way that no gap
3170 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3171 is the distance in bits between the end of PREV_FIELD and the starting
3172 position of CURR_FIELD. It is ignored if null. */
3173
3174 static bool
3175 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3176 {
3177 /* If this is the first field of the record, there cannot be any gap. */
3178 if (!prev_field)
3179 return false;
3180
3181 /* If the previous field is a union type, then return false: the only
3182 time when such a field is not the last field of the record is when
3183 there are other components at fixed positions after it (meaning there
3184 was a rep clause for every field), in which case we don't want the
3185 alignment constraint to override them. */
3186 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3187 return false;
3188
3189 /* If the distance between the end of prev_field and the beginning of
3190 curr_field is constant, then there is a gap if the value of this
3191 constant is not zero. */
3192 if (offset && tree_fits_uhwi_p (offset))
3193 return !integer_zerop (offset);
3194
3195 /* If the size and position of the previous field are constant,
3196 then check the sum of this size and position. There will be a gap
3197 iff it is not a multiple of the current field alignment. */
3198 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3199 && tree_fits_uhwi_p (bit_position (prev_field)))
3200 return ((tree_to_uhwi (bit_position (prev_field))
3201 + tree_to_uhwi (DECL_SIZE (prev_field)))
3202 % DECL_ALIGN (curr_field) != 0);
3203
3204 /* If both the position and size of the previous field are multiples
3205 of the current field alignment, there cannot be any gap. */
3206 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3207 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3208 return false;
3209
3210 /* Fallback: return that there may be a gap. */
3211 return true;
3212 }
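
/* For instance (illustrative figures): with PREV_FIELD at bit position 0
   and DECL_SIZE 24, a CURR_FIELD with DECL_ALIGN 32 is reported as
   potentially gapped since (0 + 24) % 32 != 0, while one with
   DECL_ALIGN 8 is not, 24 being a multiple of 8.  */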
3213
3214 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3215 the decl. */
3216
3217 tree
3218 create_label_decl (tree name, Node_Id gnat_node)
3219 {
3220 tree label_decl
3221 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3222
3223 SET_DECL_MODE (label_decl, VOIDmode);
3224
3225 /* Add this decl to the current binding level. */
3226 gnat_pushdecl (label_decl, gnat_node);
3227
3228 return label_decl;
3229 }
3230 \f
3231 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3232 its assembler name, TYPE its type (a FUNCTION_TYPE or METHOD_TYPE node),
3233 PARAM_DECL_LIST the list of its parameters (a list of PARM_DECL nodes
3234 chained through the DECL_CHAIN field).
3235
3236 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3237
3238 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3239 definition to be made visible outside of the current compilation unit.
3240
3241 EXTERN_FLAG is true when processing an external subprogram declaration.
3242
3243 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3244
3245 DEBUG_INFO_P is true if we need to write debug information for it.
3246
3247 DEFINITION is true if the subprogram is to be considered as a definition.
3248
3249 ATTR_LIST is the list of attributes to be attached to the subprogram.
3250
3251 GNAT_NODE is used for the position of the decl. */
3252
3253 tree
3254 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3255 enum inline_status_t inline_status, bool public_flag,
3256 bool extern_flag, bool artificial_p, bool debug_info_p,
3257 bool definition, struct attrib *attr_list,
3258 Node_Id gnat_node)
3259 {
3260 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3261 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3262
3263 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3264 DECL_EXTERNAL (subprog_decl) = extern_flag;
3265 DECL_FUNCTION_IS_DEF (subprog_decl) = definition;
3266 DECL_IGNORED_P (subprog_decl) = !debug_info_p;
3267 TREE_PUBLIC (subprog_decl) = public_flag;
3268
3269 switch (inline_status)
3270 {
3271 case is_suppressed:
3272 DECL_UNINLINABLE (subprog_decl) = 1;
3273 break;
3274
3275 case is_default:
3276 break;
3277
3278 case is_required:
3279 if (Back_End_Inlining)
3280 {
3281 decl_attributes (&subprog_decl,
3282 tree_cons (get_identifier ("always_inline"),
3283 NULL_TREE, NULL_TREE),
3284 ATTR_FLAG_TYPE_IN_PLACE);
3285
3286 /* Inline_Always guarantees that every direct call is inlined and
3287 that there is no indirect reference to the subprogram, so the
3288 instance in the original package (as well as its clones in the
3289 client packages created for inter-unit inlining) can be made
3290 private, which causes the out-of-line body to be eliminated. */
3291 TREE_PUBLIC (subprog_decl) = 0;
3292 }
3293
3294 /* ... fall through ... */
3295
3296 case is_prescribed:
3297 DECL_DISREGARD_INLINE_LIMITS (subprog_decl) = 1;
3298
3299 /* ... fall through ... */
3300
3301 case is_requested:
3302 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3303 if (!Debug_Generated_Code)
3304 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3305 break;
3306
3307 default:
3308 gcc_unreachable ();
3309 }
3310
3311 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3312
3313 /* Once everything is processed, finish the subprogram declaration. */
3314 finish_subprog_decl (subprog_decl, asm_name, type);
3315
3316 /* Add this decl to the current binding level. */
3317 gnat_pushdecl (subprog_decl, gnat_node);
3318
3319 /* Output the assembler code and/or RTL for the declaration. */
3320 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3321
3322 return subprog_decl;
3323 }
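
/* A minimal usage sketch, with assumed arguments that are not taken from
   the actual callers: declaring a public, non-inlined procedure could
   look like

     create_subprog_decl (get_identifier ("proc"), NULL_TREE, ftype,
                          param_list, is_default, true, false, false,
                          true, true, NULL, gnat_node);

   where a NULL_TREE ASM_NAME leaves the assembler name to be derived
   from NAME by the middle end.  */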
3324
3325 /* Given a subprogram declaration DECL, its assembler name and its type,
3326 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3327
3328 void
3329 finish_subprog_decl (tree decl, tree asm_name, tree type)
3330 {
3331 tree result_decl
3332 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3333 TREE_TYPE (type));
3334
3335 DECL_ARTIFICIAL (result_decl) = 1;
3336 DECL_IGNORED_P (result_decl) = 1;
3337 DECL_CONTEXT (result_decl) = decl;
3338 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3339 DECL_RESULT (decl) = result_decl;
3340
3341 /* Propagate the "const" property. */
3342 TREE_READONLY (decl) = TYPE_READONLY (type);
3343
3344 /* Propagate the "pure" property. */
3345 DECL_PURE_P (decl) = TYPE_RESTRICT (type);
3346
3347 /* Propagate the "noreturn" property. */
3348 TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3349
3350 if (asm_name)
3351 {
3352 /* Let the target mangle the name if this isn't a verbatim asm. */
3353 if (*IDENTIFIER_POINTER (asm_name) != '*')
3354 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3355
3356 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3357
3358 /* The expand_main_function circuitry expects "main_identifier_node" to
3359 designate the DECL_NAME of the 'main' entry point, in turn expected
3360 to be declared as the "main" function literally by default. Ada
3361 program entry points are typically declared with a different name
3362 within the binder-generated file, exported as 'main' to satisfy
3363 system expectations. Force main_identifier_node in this case. */
3364 if (asm_name == main_identifier_node)
3365 DECL_NAME (decl) = main_identifier_node;
3366 }
3367 }
3368 \f
3369 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3370 body. This routine needs to be invoked before processing the declarations
3371 appearing in the subprogram. */
3372
3373 void
3374 begin_subprog_body (tree subprog_decl)
3375 {
3376 tree param_decl;
3377
3378 announce_function (subprog_decl);
3379
3380 /* This function is being defined. */
3381 TREE_STATIC (subprog_decl) = 1;
3382
3383 /* The failure of this assertion will likely come from a wrong context for
3384 the subprogram body, e.g. another procedure for a procedure declared at
3385 library level. */
3386 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3387
3388 current_function_decl = subprog_decl;
3389
3390 /* Enter a new binding level and show that all the parameters belong to
3391 this function. */
3392 gnat_pushlevel ();
3393
3394 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3395 param_decl = DECL_CHAIN (param_decl))
3396 DECL_CONTEXT (param_decl) = subprog_decl;
3397
3398 make_decl_rtl (subprog_decl);
3399 }
3400
3401 /* Finish translating the current subprogram and set its BODY. */
3402
3403 void
3404 end_subprog_body (tree body)
3405 {
3406 tree fndecl = current_function_decl;
3407
3408 /* Attach the BLOCK for this level to the function and pop the level. */
3409 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3410 DECL_INITIAL (fndecl) = current_binding_level->block;
3411 gnat_poplevel ();
3412
3413 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3414 if (TREE_CODE (body) == BIND_EXPR)
3415 {
3416 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3417 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3418 }
3419
3420 DECL_SAVED_TREE (fndecl) = body;
3421
3422 current_function_decl = decl_function_context (fndecl);
3423 }
3424
3425 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3426
3427 void
3428 rest_of_subprog_body_compilation (tree subprog_decl)
3429 {
3430 /* We cannot track the location of errors past this point. */
3431 Current_Error_Node = Empty;
3432
3433 /* If we're only annotating types, don't actually compile this function. */
3434 if (type_annotate_only)
3435 return;
3436
3437 /* Dump functions before gimplification. */
3438 dump_function (TDI_original, subprog_decl);
3439
3440 if (!decl_function_context (subprog_decl))
3441 cgraph_node::finalize_function (subprog_decl, false);
3442 else
3443 /* Register this function with cgraph just far enough to get it
3444 added to our parent's nested function list. */
3445 (void) cgraph_node::get_create (subprog_decl);
3446 }
3447
3448 tree
3449 gnat_builtin_function (tree decl)
3450 {
3451 gnat_pushdecl (decl, Empty);
3452 return decl;
3453 }
3454
3455 /* Return an integer type with the number of bits of precision given by
3456 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3457 it is a signed type. */
3458
3459 tree
3460 gnat_type_for_size (unsigned precision, int unsignedp)
3461 {
3462 tree t;
3463 char type_name[20];
3464
3465 if (precision <= 2 * MAX_BITS_PER_WORD
3466 && signed_and_unsigned_types[precision][unsignedp])
3467 return signed_and_unsigned_types[precision][unsignedp];
3468
3469 if (unsignedp)
3470 t = make_unsigned_type (precision);
3471 else
3472 t = make_signed_type (precision);
3473 TYPE_ARTIFICIAL (t) = 1;
3474
3475 if (precision <= 2 * MAX_BITS_PER_WORD)
3476 signed_and_unsigned_types[precision][unsignedp] = t;
3477
3478 if (!TYPE_NAME (t))
3479 {
3480 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3481 TYPE_NAME (t) = get_identifier (type_name);
3482 }
3483
3484 return t;
3485 }
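
/* Illustration: gnat_type_for_size (8, 1) returns the cached unsigned
   8-bit type, named "UNSIGNED_8" if it is created here, while
   gnat_type_for_size (32, 0) returns a signed 32-bit type named
   "SIGNED_32".  */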
3486
3487 /* Likewise for floating-point types. */
3488
3489 static tree
3490 float_type_for_precision (int precision, machine_mode mode)
3491 {
3492 tree t;
3493 char type_name[20];
3494
3495 if (float_types[(int) mode])
3496 return float_types[(int) mode];
3497
3498 float_types[(int) mode] = t = make_node (REAL_TYPE);
3499 TYPE_PRECISION (t) = precision;
3500 layout_type (t);
3501
3502 gcc_assert (TYPE_MODE (t) == mode);
3503 if (!TYPE_NAME (t))
3504 {
3505 sprintf (type_name, "FLOAT_%d", precision);
3506 TYPE_NAME (t) = get_identifier (type_name);
3507 }
3508
3509 return t;
3510 }
3511
3512 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3513 an unsigned type; otherwise a signed type is returned. */
3514
3515 tree
3516 gnat_type_for_mode (machine_mode mode, int unsignedp)
3517 {
3518 if (mode == BLKmode)
3519 return NULL_TREE;
3520
3521 if (mode == VOIDmode)
3522 return void_type_node;
3523
3524 if (COMPLEX_MODE_P (mode))
3525 return NULL_TREE;
3526
3527 scalar_float_mode float_mode;
3528 if (is_a <scalar_float_mode> (mode, &float_mode))
3529 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3530 float_mode);
3531
3532 scalar_int_mode int_mode;
3533 if (is_a <scalar_int_mode> (mode, &int_mode))
3534 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3535
3536 if (VECTOR_MODE_P (mode))
3537 {
3538 machine_mode inner_mode = GET_MODE_INNER (mode);
3539 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3540 if (inner_type)
3541 return build_vector_type_for_mode (inner_type, mode);
3542 }
3543
3544 return NULL_TREE;
3545 }
3546
3547 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3548 signedness being specified by UNSIGNEDP. */
3549
3550 tree
3551 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3552 {
3553 if (type_node == char_type_node)
3554 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3555
3556 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3557
3558 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3559 {
3560 type = copy_type (type);
3561 TREE_TYPE (type) = type_node;
3562 }
3563 else if (TREE_TYPE (type_node)
3564 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3565 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3566 {
3567 type = copy_type (type);
3568 TREE_TYPE (type) = TREE_TYPE (type_node);
3569 }
3570
3571 return type;
3572 }
3573
3574 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3575 transparently converted to each other. */
3576
3577 int
3578 gnat_types_compatible_p (tree t1, tree t2)
3579 {
3580 enum tree_code code;
3581
3582 /* This is the default criterion. */
3583 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3584 return 1;
3585
3586 /* We only check structural equivalence here. */
3587 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3588 return 0;
3589
3590 /* Vector types are also compatible if they have the same number of subparts
3591 and the same form of (scalar) element type. */
3592 if (code == VECTOR_TYPE
3593 && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
3594 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3595 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3596 return 1;
3597
3598 /* Array types are also compatible if they are constrained and have the same
3599 domain(s), the same component type and the same scalar storage order. */
3600 if (code == ARRAY_TYPE
3601 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3602 || (TYPE_DOMAIN (t1)
3603 && TYPE_DOMAIN (t2)
3604 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3605 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3606 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3607 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3608 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3609 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3610 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3611 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3612 return 1;
3613
3614 return 0;
3615 }
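
/* Example (sketch): two distinct ARRAY_TYPE nodes for String (1 .. 10)
   with equal bounds, the same component type and the same scalar storage
   order compare compatible here; this is what later allows
   gnat_useless_type_conversion to strip conversions between them.  */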
3616
3617 /* Return true if EXPR is a useless type conversion. */
3618
3619 bool
3620 gnat_useless_type_conversion (tree expr)
3621 {
3622 if (CONVERT_EXPR_P (expr)
3623 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3624 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3625 return gnat_types_compatible_p (TREE_TYPE (expr),
3626 TREE_TYPE (TREE_OPERAND (expr, 0)));
3627
3628 return false;
3629 }
3630
3631 /* Return true if T, a {FUNCTION,METHOD}_TYPE, has the specified flags. */
3632
3633 bool
3634 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3635 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3636 {
3637 return TYPE_CI_CO_LIST (t) == cico_list
3638 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3639 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3640 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3641 }
3642 \f
3643 /* EXP is an expression for the size of an object. If this size contains
3644 discriminant references, replace them with the maximum (if MAX_P) or
3645 minimum (if !MAX_P) possible value of the discriminant. */
3646
3647 tree
3648 max_size (tree exp, bool max_p)
3649 {
3650 enum tree_code code = TREE_CODE (exp);
3651 tree type = TREE_TYPE (exp);
3652 tree op0, op1, op2;
3653
3654 switch (TREE_CODE_CLASS (code))
3655 {
3656 case tcc_declaration:
3657 case tcc_constant:
3658 return exp;
3659
3660 case tcc_exceptional:
3661 gcc_assert (code == SSA_NAME);
3662 return exp;
3663
3664 case tcc_vl_exp:
3665 if (code == CALL_EXPR)
3666 {
3667 tree t, *argarray;
3668 int n, i;
3669
3670 t = maybe_inline_call_in_expr (exp);
3671 if (t)
3672 return max_size (t, max_p);
3673
3674 n = call_expr_nargs (exp);
3675 gcc_assert (n > 0);
3676 argarray = XALLOCAVEC (tree, n);
3677 for (i = 0; i < n; i++)
3678 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3679 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3680 }
3681 break;
3682
3683 case tcc_reference:
3684 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3685 modify. Otherwise, we treat it like a variable. */
3686 if (CONTAINS_PLACEHOLDER_P (exp))
3687 {
3688 tree base_type = get_base_type (TREE_TYPE (TREE_OPERAND (exp, 1)));
3689 tree val
3690 = fold_convert (base_type,
3691 max_p
3692 ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3693
3694 /* Walk down the extra subtypes to get more restrictive bounds. */
3695 while (TYPE_IS_EXTRA_SUBTYPE_P (type))
3696 {
3697 type = TREE_TYPE (type);
3698 if (max_p)
3699 val = fold_build2 (MIN_EXPR, base_type, val,
3700 fold_convert (base_type,
3701 TYPE_MAX_VALUE (type)));
3702 else
3703 val = fold_build2 (MAX_EXPR, base_type, val,
3704 fold_convert (base_type,
3705 TYPE_MIN_VALUE (type)));
3706 }
3707
3708 return fold_convert (type, max_size (val, max_p));
3709 }
3710
3711 return exp;
3712
3713 case tcc_comparison:
3714 return build_int_cst (type, max_p ? 1 : 0);
3715
3716 case tcc_unary:
3717 if (code == NON_LVALUE_EXPR)
3718 return max_size (TREE_OPERAND (exp, 0), max_p);
3719
3720 op0 = max_size (TREE_OPERAND (exp, 0),
3721 code == NEGATE_EXPR ? !max_p : max_p);
3722
3723 if (op0 == TREE_OPERAND (exp, 0))
3724 return exp;
3725
3726 return fold_build1 (code, type, op0);
3727
3728 case tcc_binary:
3729 op0 = TREE_OPERAND (exp, 0);
3730 op1 = TREE_OPERAND (exp, 1);
3731
3732 /* If we have a multiply-add with a "negative" value in an unsigned
3733 type, do a multiply-subtract with the negated value, in order to
3734 avoid creating a spurious overflow below. */
3735 if (code == PLUS_EXPR
3736 && TREE_CODE (op0) == MULT_EXPR
3737 && TYPE_UNSIGNED (type)
3738 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
3739 && !TREE_OVERFLOW (TREE_OPERAND (op0, 1))
3740 && tree_int_cst_sign_bit (TREE_OPERAND (op0, 1)))
3741 {
3742 tree tmp = op1;
3743 op1 = build2 (MULT_EXPR, type, TREE_OPERAND (op0, 0),
3744 fold_build1 (NEGATE_EXPR, type,
3745 TREE_OPERAND (op0, 1)));
3746 op0 = tmp;
3747 code = MINUS_EXPR;
3748 }
3749
3750 op0 = max_size (op0, max_p);
3751 op1 = max_size (op1, code == MINUS_EXPR ? !max_p : max_p);
3752
3753 if ((code == MINUS_EXPR || code == PLUS_EXPR))
3754 {
3755 /* If op0 has overflowed and op1 is a variable,
3756 propagate the overflow by returning op0. */
3757 if (TREE_CODE (op0) == INTEGER_CST
3758 && TREE_OVERFLOW (op0)
3759 && TREE_CODE (op1) != INTEGER_CST)
3760 return op0;
3761
3762 /* If we have a "negative" value in an unsigned type, do the
3763 opposite operation on the negated value, in order to avoid
3764 creating a spurious overflow below. */
3765 if (TYPE_UNSIGNED (type)
3766 && TREE_CODE (op1) == INTEGER_CST
3767 && !TREE_OVERFLOW (op1)
3768 && tree_int_cst_sign_bit (op1))
3769 {
3770 op1 = fold_build1 (NEGATE_EXPR, type, op1);
3771 code = (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR);
3772 }
3773 }
3774
3775 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3776 return exp;
3777
3778 /* We need to detect overflows so we call size_binop here. */
3779 return size_binop (code, op0, op1);
3780
3781 case tcc_expression:
3782 switch (TREE_CODE_LENGTH (code))
3783 {
3784 case 1:
3785 if (code == SAVE_EXPR)
3786 return exp;
3787
3788 op0 = max_size (TREE_OPERAND (exp, 0),
3789 code == TRUTH_NOT_EXPR ? !max_p : max_p);
3790
3791 if (op0 == TREE_OPERAND (exp, 0))
3792 return exp;
3793
3794 return fold_build1 (code, type, op0);
3795
3796 case 2:
3797 if (code == COMPOUND_EXPR)
3798 return max_size (TREE_OPERAND (exp, 1), max_p);
3799
3800 op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3801 op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3802
3803 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3804 return exp;
3805
3806 return fold_build2 (code, type, op0, op1);
3807
3808 case 3:
3809 if (code == COND_EXPR)
3810 {
3811 op0 = TREE_OPERAND (exp, 0);
3812 op1 = TREE_OPERAND (exp, 1);
3813 op2 = TREE_OPERAND (exp, 2);
3814
3815 if (!op1 || !op2)
3816 return exp;
3817
3818 op1 = max_size (op1, max_p);
3819 op2 = max_size (op2, max_p);
3820
3821 /* If we have the MAX of a "negative" value in an unsigned type
3822 and zero for a length expression, just return zero. */
3823 if (max_p
3824 && TREE_CODE (op0) == LE_EXPR
3825 && TYPE_UNSIGNED (type)
3826 && TREE_CODE (op1) == INTEGER_CST
3827 && !TREE_OVERFLOW (op1)
3828 && tree_int_cst_sign_bit (op1)
3829 && integer_zerop (op2))
3830 return op2;
3831
3832 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2);
3833 }
3834 break;
3835
3836 default:
3837 break;
3838 }
3839
3840 /* Other tree classes cannot happen. */
3841 default:
3842 break;
3843 }
3844
3845 gcc_unreachable ();
3846 }
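
/* A worked illustration (assuming a discriminant D in 1 .. 10): for the
   size expression 8 * D, max_size returns 80 when MAX_P and 8 otherwise;
   for 100 - 8 * D, the MINUS_EXPR case flips the direction of its second
   operand, so the maximum is 100 - 8 = 92.  */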
3847 \f
3848 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3849 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3850 Return a constructor for the template. */
3851
3852 tree
3853 build_template (tree template_type, tree array_type, tree expr)
3854 {
3855 vec<constructor_elt, va_gc> *template_elts = NULL;
3856 tree bound_list = NULL_TREE;
3857 tree field;
3858
3859 while (TREE_CODE (array_type) == RECORD_TYPE
3860 && (TYPE_PADDING_P (array_type)
3861 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3862 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3863
3864 if (TREE_CODE (array_type) == ARRAY_TYPE
3865 || (TREE_CODE (array_type) == INTEGER_TYPE
3866 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3867 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3868
3869 /* First make the list for a CONSTRUCTOR for the template. Go down the
3870 field list of the template instead of the type chain because this
3871 array might be an Ada array of arrays and we can't tell where the
3872 nested arrays stop being the underlying object. */
3873
3874 for (field = TYPE_FIELDS (template_type); field;
3875 (bound_list
3876 ? (bound_list = TREE_CHAIN (bound_list))
3877 : (array_type = TREE_TYPE (array_type))),
3878 field = DECL_CHAIN (DECL_CHAIN (field)))
3879 {
3880 tree bounds, min, max;
3881
3882 /* If we have a bound list, get the bounds from there. Likewise
3883 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3884 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3885 This will give us a maximum range. */
3886 if (bound_list)
3887 bounds = TREE_VALUE (bound_list);
3888 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3889 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3890 else if (expr && TREE_CODE (expr) == PARM_DECL
3891 && DECL_BY_COMPONENT_PTR_P (expr))
3892 bounds = TREE_TYPE (field);
3893 else
3894 gcc_unreachable ();
3895
3896 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3897 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3898
3899 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3900 substitute it from OBJECT. */
3901 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3902 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3903
3904 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3905 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3906 }
3907
3908 return gnat_build_constructor (template_type, template_elts);
3909 }
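
/* Shape sketch: for a one-dimensional array with bounds 1 .. N, the
   returned CONSTRUCTOR pairs the two template fields as
   { <low bound field> => 1, <high bound field> => N }, any
   PLACEHOLDER_EXPR in the bounds having been substituted from EXPR.  */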
3910 \f
3911 /* Return true if TYPE is suitable for the element type of a vector. */
3912
3913 static bool
3914 type_for_vector_element_p (tree type)
3915 {
3916 machine_mode mode;
3917
3918 if (!INTEGRAL_TYPE_P (type)
3919 && !SCALAR_FLOAT_TYPE_P (type)
3920 && !FIXED_POINT_TYPE_P (type))
3921 return false;
3922
3923 mode = TYPE_MODE (type);
3924 if (GET_MODE_CLASS (mode) != MODE_INT
3925 && !SCALAR_FLOAT_MODE_P (mode)
3926 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3927 return false;
3928
3929 return true;
3930 }
3931
3932 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3933 this is not possible. If ATTRIBUTE is nonzero, we are processing the
3934 attribute declaration and want to issue error messages on failure. */
3935
3936 static tree
3937 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3938 {
3939 unsigned HOST_WIDE_INT size_int, inner_size_int;
3940 int nunits;
3941
3942 /* Silently punt on variable sizes. We can't make vector types for them,
3943 we need to ignore them on front-end generated subtypes of unconstrained
3944 base types, and this attribute is for binding implementors, not end
3945 users, so we should never get there from legitimate explicit uses. */
3946 if (!tree_fits_uhwi_p (size))
3947 return NULL_TREE;
3948 size_int = tree_to_uhwi (size);
3949
3950 if (!type_for_vector_element_p (inner_type))
3951 {
3952 if (attribute)
3953 error ("invalid element type for attribute %qs",
3954 IDENTIFIER_POINTER (attribute));
3955 return NULL_TREE;
3956 }
3957 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3958
3959 if (size_int % inner_size_int)
3960 {
3961 if (attribute)
3962 error ("vector size not an integral multiple of component size");
3963 return NULL_TREE;
3964 }
3965
3966 if (size_int == 0)
3967 {
3968 if (attribute)
3969 error ("zero vector size");
3970 return NULL_TREE;
3971 }
3972
3973 nunits = size_int / inner_size_int;
3974 if (nunits & (nunits - 1))
3975 {
3976 if (attribute)
3977 error ("number of components of vector not a power of two");
3978 return NULL_TREE;
3979 }
3980
3981 return build_vector_type (inner_type, nunits);
3982 }
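
/* Illustration of the checks above: with a 4-byte INNER_TYPE, a SIZE of
   16 yields a 4-unit vector; 14 trips the "integral multiple" error and
   12 the "power of two" error, 3 units not being a power of two.  */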
3983
3984 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3985 NULL_TREE if this is not possible. If ATTRIBUTE is nonzero, we are
3986 processing the attribute and want to issue error messages on failure. */
3987
3988 static tree
3989 build_vector_type_for_array (tree array_type, tree attribute)
3990 {
3991 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3992 TYPE_SIZE_UNIT (array_type),
3993 attribute);
3994 if (!vector_type)
3995 return NULL_TREE;
3996
3997 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3998 return vector_type;
3999 }
4000 \f
4001 /* Build a type to be used to represent an aliased object whose nominal type
4002 is an unconstrained array. This consists of a RECORD_TYPE containing a
4003 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4004 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4005 an arbitrary unconstrained object. Use NAME as the name of the record.
4006 DEBUG_INFO_P is true if we need to write debug information for the type. */
4007
4008 tree
4009 build_unc_object_type (tree template_type, tree object_type, tree name,
4010 bool debug_info_p)
4011 {
4012 tree decl;
4013 tree type = make_node (RECORD_TYPE);
4014 tree template_field
4015 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4016 NULL_TREE, NULL_TREE, 0, 1);
4017 tree array_field
4018 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4019 NULL_TREE, NULL_TREE, 0, 1);
4020
4021 TYPE_NAME (type) = name;
4022 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4023 DECL_CHAIN (template_field) = array_field;
4024 finish_record_type (type, template_field, 0, true);
4025
4026 /* Declare it now since it will never be declared otherwise. This is
4027 necessary to ensure that its subtrees are properly marked. */
4028 decl = create_type_decl (name, type, true, debug_info_p, Empty);
4029
4030 /* template_type will not be used anywhere other than here, so to keep
4031 the debug info clean and to avoid scoping issues, make decl its
4032 context. */
4033 gnat_set_type_context (template_type, decl);
4034
4035 return type;
4036 }
4037
4038 /* Same, taking a thin or fat pointer type instead of a template type. */
4039
4040 tree
4041 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4042 tree name, bool debug_info_p)
4043 {
4044 tree template_type;
4045
4046 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4047
4048 template_type
4049 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4050 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4051 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4052
4053 return
4054 build_unc_object_type (template_type, object_type, name, debug_info_p);
4055 }
4056 \f
4057 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4058 In the normal case this is just two adjustments, but we have more to
4059 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4060
4061 void
4062 update_pointer_to (tree old_type, tree new_type)
4063 {
4064 tree ptr = TYPE_POINTER_TO (old_type);
4065 tree ref = TYPE_REFERENCE_TO (old_type);
4066 tree t;
4067
4068 /* If this is the main variant, process all the other variants first. */
4069 if (TYPE_MAIN_VARIANT (old_type) == old_type)
4070 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4071 update_pointer_to (t, new_type);
4072
4073 /* If no pointers and no references, we are done. */
4074 if (!ptr && !ref)
4075 return;
4076
4077 /* Merge the old type qualifiers in the new type.
4078
4079 Each old variant has qualifiers for specific reasons, and the new
4080 designated type as well. Each set of qualifiers represents useful
4081 information grabbed at some point, and merging the two simply unifies
4082 these inputs into the final type description.
4083
4084 Consider for instance a volatile type frozen after an access to constant
4085 type designating it; after the designated type's freeze, we get here with
4086 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4087 when the access type was processed. We will make a volatile and readonly
4088 designated type, because that's what it really is.
4089
4090 We might also get here for a non-dummy OLD_TYPE variant with different
4091 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4092 to private record type elaboration (see the comments around the call to
4093 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4094 the qualifiers in those cases too, to avoid accidentally discarding the
4095 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4096 new_type
4097 = build_qualified_type (new_type,
4098 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4099
4100 /* If old type and new type are identical, there is nothing to do. */
4101 if (old_type == new_type)
4102 return;
4103
4104 /* Otherwise, first handle the simple case. */
4105 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4106 {
4107 tree new_ptr, new_ref;
4108
4109 /* If pointer or reference already points to new type, nothing to do.
4110 This can happen as update_pointer_to can be invoked multiple times
4111 on the same pair of types because of the type variants. */
4112 if ((ptr && TREE_TYPE (ptr) == new_type)
4113 || (ref && TREE_TYPE (ref) == new_type))
4114 return;
4115
4116 /* Chain PTR and its variants at the end. */
4117 new_ptr = TYPE_POINTER_TO (new_type);
4118 if (new_ptr)
4119 {
4120 while (TYPE_NEXT_PTR_TO (new_ptr))
4121 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4122 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4123 }
4124 else
4125 TYPE_POINTER_TO (new_type) = ptr;
4126
4127 /* Now adjust them. */
4128 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4129 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4130 {
4131 TREE_TYPE (t) = new_type;
4132 if (TYPE_NULL_BOUNDS (t))
4133 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4134 }
4135
4136 /* Chain REF and its variants at the end. */
4137 new_ref = TYPE_REFERENCE_TO (new_type);
4138 if (new_ref)
4139 {
4140 while (TYPE_NEXT_REF_TO (new_ref))
4141 new_ref = TYPE_NEXT_REF_TO (new_ref);
4142 TYPE_NEXT_REF_TO (new_ref) = ref;
4143 }
4144 else
4145 TYPE_REFERENCE_TO (new_type) = ref;
4146
4147 /* Now adjust them. */
4148 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4149 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4150 TREE_TYPE (t) = new_type;
4151
4152 TYPE_POINTER_TO (old_type) = NULL_TREE;
4153 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4154 }
4155
4156 /* Now deal with the unconstrained array case. In this case the pointer
4157 is actually a record where both fields are pointers to dummy nodes.
4158 Turn them into pointers to the correct types using update_pointer_to.
4159 Likewise for the pointer to the object record (thin pointer). */
4160 else
4161 {
4162 tree new_ptr = TYPE_POINTER_TO (new_type);
4163
4164 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4165
4166 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4167 since update_pointer_to can be invoked multiple times on the same
4168 pair of types because of the type variants. */
4169 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4170 return;
4171
4172 update_pointer_to
4173 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4174 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4175
4176 update_pointer_to
4177 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4178 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4179
4180 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4181 TYPE_OBJECT_RECORD_TYPE (new_type));
4182
4183 TYPE_POINTER_TO (old_type) = NULL_TREE;
4184 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4185 }
4186 }
4187 \f
4188 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4189 unconstrained one. This involves making or finding a template. */
4190
4191 static tree
4192 convert_to_fat_pointer (tree type, tree expr)
4193 {
4194 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4195 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4196 tree etype = TREE_TYPE (expr);
4197 tree template_addr;
4198 vec<constructor_elt, va_gc> *v;
4199 vec_alloc (v, 2);
4200
4201 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4202 array (compare_fat_pointers ensures that this is the full discriminant)
4203 and a valid pointer to the bounds. This latter property is necessary
4204 since the compiler can hoist the load of the bounds done through it. */
4205 if (integer_zerop (expr))
4206 {
4207 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4208 tree null_bounds, t;
4209
4210 if (TYPE_NULL_BOUNDS (ptr_template_type))
4211 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4212 else
4213 {
4214 /* The template type can still be dummy at this point so we build an
4215 empty constructor. The middle-end will fill it in with zeros. */
4216 t = build_constructor (template_type, NULL);
4217 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4218 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4219 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4220 }
4221
4222 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4223 fold_convert (p_array_type, null_pointer_node));
4224 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4225 t = build_constructor (type, v);
4226 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4227 TREE_CONSTANT (t) = 0;
4228 TREE_STATIC (t) = 1;
4229
4230 return t;
4231 }
4232
4233 /* If EXPR is a thin pointer, make template and data from the record. */
4234 if (TYPE_IS_THIN_POINTER_P (etype))
4235 {
4236 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4237
4238 expr = gnat_protect_expr (expr);
4239
4240 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4241 the thin pointer value has been shifted so we shift it back to get
4242 the template address. */
4243 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4244 {
4245 template_addr
4246 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4247 fold_build1 (NEGATE_EXPR, sizetype,
4248 byte_position
4249 (DECL_CHAIN (field))));
4250 template_addr
4251 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4252 template_addr);
4253 }
4254
4255 /* Otherwise we explicitly take the address of the fields. */
4256 else
4257 {
4258 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4259 template_addr
4260 = build_unary_op (ADDR_EXPR, NULL_TREE,
4261 build_component_ref (expr, field, false));
4262 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4263 build_component_ref (expr, DECL_CHAIN (field),
4264 false));
4265 }
4266 }
4267
4268 /* Otherwise, build the constructor for the template. */
4269 else
4270 template_addr
4271 = build_unary_op (ADDR_EXPR, NULL_TREE,
4272 build_template (template_type, TREE_TYPE (etype),
4273 expr));
4274
4275 /* The final result is a constructor for the fat pointer.
4276
4277 If EXPR is an argument of a foreign convention subprogram, the type it
4278 points to is directly the component type. In this case, the expression
4279 type may not match the corresponding FIELD_DECL type at this point, so we
4280 call "convert" here to fix that up if necessary. This type consistency is
4281 required, for instance because it ensures that possible later folding of
4282 COMPONENT_REFs against this constructor always yields something of the
4283 same type as the initial reference.
4284
4285 Note that the call to "build_template" above is still fine because it
4286 will only refer to the provided TEMPLATE_TYPE in this case. */
4287 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4288 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4289 return gnat_build_constructor (type, v);
4290 }
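
/* Result sketch: the returned fat pointer is a two-field CONSTRUCTOR,

     { <array pointer field>    => address of the data,
       <template pointer field> => address of the bounds template }

   where, for a null EXPR, the first field is a null pointer and the
   second points to an all-zero template, per the code above.  */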
4291 \f
4292 /* Create an expression whose value is that of EXPR,
4293 converted to type TYPE. The TREE_TYPE of the value
4294 is always TYPE. This function implements all reasonable
4295 conversions; callers should filter out those that are
4296 not permitted by the language being compiled. */
4297
4298 tree
4299 convert (tree type, tree expr)
4300 {
4301 tree etype = TREE_TYPE (expr);
4302 enum tree_code ecode = TREE_CODE (etype);
4303 enum tree_code code = TREE_CODE (type);
4304
4305 /* If the expression is already of the right type, we are done. */
4306 if (etype == type)
4307 return expr;
4308
4309 /* If both input and output have padding and are of variable size, do this
4310 as an unchecked conversion. Likewise if one is a mere variant of the
4311 other, so we avoid a pointless unpad/repad sequence. */
4312 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4313 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4314 && (!TREE_CONSTANT (TYPE_SIZE (type))
4315 || !TREE_CONSTANT (TYPE_SIZE (etype))
4316 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4317 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4318 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4319 ;
4320
4321 /* If the output type has padding, convert to the inner type and make a
4322 constructor to build the record, unless a variable size is involved. */
4323 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4324 {
4325 /* If we previously converted from another type and our type is
4326 of variable size, remove the conversion to avoid the need for
4327 variable-sized temporaries. Likewise for a conversion between
4328 original and packable version. */
4329 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4330 && (!TREE_CONSTANT (TYPE_SIZE (type))
4331 || (ecode == RECORD_TYPE
4332 && TYPE_NAME (etype)
4333 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4334 expr = TREE_OPERAND (expr, 0);
4335
4336 /* If we are just removing the padding from expr, convert the original
4337 object if we have variable size in order to avoid the need for some
4338 variable-sized temporaries. Likewise if the padding is a variant
4339 of the other, so we avoid a pointless unpad/repad sequence. */
4340 if (TREE_CODE (expr) == COMPONENT_REF
4341 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4342 && (!TREE_CONSTANT (TYPE_SIZE (type))
4343 || TYPE_MAIN_VARIANT (type)
4344 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4345 || (ecode == RECORD_TYPE
4346 && TYPE_NAME (etype)
4347 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4348 return convert (type, TREE_OPERAND (expr, 0));
4349
4350 /* If the inner type is of self-referential size and the expression type
4351 is a record, do this as an unchecked conversion unless both types are
4352 essentially the same. But first pad the expression if possible to
4353 have the same size on both sides. */
4354 if (ecode == RECORD_TYPE
4355 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4356 && TYPE_MAIN_VARIANT (etype)
4357 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4358 {
4359 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4360 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4361 false, false, false, true),
4362 expr);
4363 return unchecked_convert (type, expr, false);
4364 }
4365
4366 /* If we are converting between array types with variable size, do the
4367 final conversion as an unchecked conversion, again to avoid the need
4368 for some variable-sized temporaries. If valid, this conversion is
4369 very likely purely technical and without real effects. */
4370 if (ecode == ARRAY_TYPE
4371 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4372 && !TREE_CONSTANT (TYPE_SIZE (etype))
4373 && !TREE_CONSTANT (TYPE_SIZE (type)))
4374 return unchecked_convert (type,
4375 convert (TREE_TYPE (TYPE_FIELDS (type)),
4376 expr),
4377 false);
4378
4379 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4380
4381 /* If converting to the inner type has already created a CONSTRUCTOR with
4382 the right size, then reuse it instead of creating another one. This
4383 can happen for the padding type built to overalign local variables. */
4384 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4385 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4386 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4387 && tree_int_cst_equal (TYPE_SIZE (type),
4388 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4389 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4390
4391 vec<constructor_elt, va_gc> *v;
4392 vec_alloc (v, 1);
4393 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4394 return gnat_build_constructor (type, v);
4395 }
4396
4397 /* If the input type has padding, remove it and convert to the output type.
4398 The ordering of the conditions ensures that the output type is not
4399 a padding type here, as it is not clear whether the conversion would
4400 always be correct if this were to happen. */
4401 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4402 {
4403 tree unpadded;
4404
4405 /* If we have just converted to this padded type, just get the
4406 inner expression. */
4407 if (TREE_CODE (expr) == CONSTRUCTOR)
4408 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4409
4410 /* Otherwise, build an explicit component reference. */
4411 else
4412 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4413
4414 return convert (type, unpadded);
4415 }
4416
4417 /* If the input is a biased type, convert first to the base type and add
4418 the bias. Note that the bias must go through a full conversion to the
4419 base type, lest it itself be a biased value; this happens for subtypes
4420 of biased types. */
4421 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4422 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4423 fold_convert (TREE_TYPE (etype), expr),
4424 convert (TREE_TYPE (etype),
4425 TYPE_MIN_VALUE (etype))));
4426
4427 /* If the input is a justified modular type, we need to extract the actual
4428 object before converting it to another type, with the exceptions of an
4429 [unconstrained] array or a mere type variant. It is useful to avoid
4430 the extraction and conversion in these cases because it could end up
4431 replacing a VAR_DECL by a constructor and we might be about to take
4432 the address of the result. */
4433 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4434 && code != ARRAY_TYPE
4435 && code != UNCONSTRAINED_ARRAY_TYPE
4436 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4437 return
4438 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4439
4440 /* If converting to a type that contains a template, convert to the data
4441 type and then build the template. */
4442 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4443 {
4444 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4445 vec<constructor_elt, va_gc> *v;
4446 vec_alloc (v, 2);
4447
4448 /* If the source already has a template, get a reference to the
4449 associated array only, as we are going to rebuild a template
4450 for the target type anyway. */
4451 expr = maybe_unconstrained_array (expr);
4452
4453 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4454 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4455 obj_type, NULL_TREE));
4456 if (expr)
4457 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4458 convert (obj_type, expr));
4459 return gnat_build_constructor (type, v);
4460 }
4461
4462 /* There are some cases of expressions that we process specially. */
4463 switch (TREE_CODE (expr))
4464 {
4465 case ERROR_MARK:
4466 return expr;
4467
4468 case NULL_EXPR:
4469 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4470 conversion in gnat_expand_expr. NULL_EXPR does not represent
4471 an actual value, so no conversion is needed. */
4472 expr = copy_node (expr);
4473 TREE_TYPE (expr) = type;
4474 return expr;
4475
4476 case STRING_CST:
4477 /* If we are converting a STRING_CST to another constrained array type,
4478 just make a new one in the proper type. */
4479 if (code == ecode && AGGREGATE_TYPE_P (etype)
4480 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4481 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4482 {
4483 expr = copy_node (expr);
4484 TREE_TYPE (expr) = type;
4485 return expr;
4486 }
4487 break;
4488
4489 case VECTOR_CST:
4490 /* If we are converting a VECTOR_CST to a mere type variant, just make
4491 a new one in the proper type. */
4492 if (code == ecode && gnat_types_compatible_p (type, etype))
4493 {
4494 expr = copy_node (expr);
4495 TREE_TYPE (expr) = type;
4496 return expr;
4497 }
4498 break;
4499
4500 case CONSTRUCTOR:
4501 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4502 another padding type around the same type, just make a new one in
4503 the proper type. */
4504 if (code == ecode
4505 && (gnat_types_compatible_p (type, etype)
4506 || (code == RECORD_TYPE
4507 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4508 && TREE_TYPE (TYPE_FIELDS (type))
4509 == TREE_TYPE (TYPE_FIELDS (etype)))))
4510 {
4511 expr = copy_node (expr);
4512 TREE_TYPE (expr) = type;
4513 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4514 return expr;
4515 }
4516
4517 /* Likewise for a conversion between original and packable version, or
4518 conversion between types of the same size and with the same list of
4519 fields, but we have to work harder to preserve type consistency. */
4520 if (code == ecode
4521 && code == RECORD_TYPE
4522 && (TYPE_NAME (type) == TYPE_NAME (etype)
4523 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4525 {
4526 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4527 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4528 vec<constructor_elt, va_gc> *v;
4529 vec_alloc (v, len);
4530 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4531 unsigned HOST_WIDE_INT idx;
4532 tree index, value;
4533
4534 /* Whether we need to clear TREE_CONSTANT et al. on the output
4535 constructor when we convert in place. */
4536 bool clear_constant = false;
4537
4538 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4539 {
4540 /* Skip the missing fields in the CONSTRUCTOR. */
4541 while (efield && field && !SAME_FIELD_P (efield, index))
4542 {
4543 efield = DECL_CHAIN (efield);
4544 field = DECL_CHAIN (field);
4545 }
4546 /* The field must be the same. */
4547 if (!(efield && field && SAME_FIELD_P (efield, field)))
4548 break;
4549 constructor_elt elt
4550 = {field, convert (TREE_TYPE (field), value)};
4551 v->quick_push (elt);
4552
4553 /* If packing has made this field a bitfield and the input
4554 value couldn't be emitted statically any more, we need to
4555 clear TREE_CONSTANT on our output. */
4556 if (!clear_constant
4557 && TREE_CONSTANT (expr)
4558 && !CONSTRUCTOR_BITFIELD_P (efield)
4559 && CONSTRUCTOR_BITFIELD_P (field)
4560 && !initializer_constant_valid_for_bitfield_p (value))
4561 clear_constant = true;
4562
4563 efield = DECL_CHAIN (efield);
4564 field = DECL_CHAIN (field);
4565 }
4566
4567 /* If we have been able to match and convert all the input fields
4568 to their output type, convert in place now. We'll fall back to a
4569 view conversion downstream otherwise. */
4570 if (idx == len)
4571 {
4572 expr = copy_node (expr);
4573 TREE_TYPE (expr) = type;
4574 CONSTRUCTOR_ELTS (expr) = v;
4575 if (clear_constant)
4576 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4577 return expr;
4578 }
4579 }
4580
4581 /* Likewise for a conversion between array type and vector type with a
4582 compatible representative array. */
4583 else if (code == VECTOR_TYPE
4584 && ecode == ARRAY_TYPE
4585 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4586 etype))
4587 {
4588 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4589 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4590 vec<constructor_elt, va_gc> *v;
4591 unsigned HOST_WIDE_INT ix;
4592 tree value;
4593
4594 /* Build a VECTOR_CST from a *constant* array constructor. */
4595 if (TREE_CONSTANT (expr))
4596 {
4597 bool constant_p = true;
4598
4599 /* Iterate through elements and check if all constructor
4600 elements are *_CSTs. */
4601 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4602 if (!CONSTANT_CLASS_P (value))
4603 {
4604 constant_p = false;
4605 break;
4606 }
4607
4608 if (constant_p)
4609 return build_vector_from_ctor (type,
4610 CONSTRUCTOR_ELTS (expr));
4611 }
4612
4613 /* Otherwise, build a regular vector constructor. */
4614 vec_alloc (v, len);
4615 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4616 {
4617 constructor_elt elt = {NULL_TREE, value};
4618 v->quick_push (elt);
4619 }
4620 expr = copy_node (expr);
4621 TREE_TYPE (expr) = type;
4622 CONSTRUCTOR_ELTS (expr) = v;
4623 return expr;
4624 }
4625 break;
4626
4627 case UNCONSTRAINED_ARRAY_REF:
4628 /* First retrieve the underlying array. */
4629 expr = maybe_unconstrained_array (expr);
4630 etype = TREE_TYPE (expr);
4631 ecode = TREE_CODE (etype);
4632 break;
4633
4634 case VIEW_CONVERT_EXPR:
4635 {
4636 /* GCC 4.x is very sensitive to type consistency overall, and view
4637 conversions thus are very frequent. Even though just "convert"ing
4638 the inner operand to the output type is fine in most cases, it
4639 might expose unexpected input/output type mismatches in special
4640 circumstances so we avoid such recursive calls when we can. */
4641 tree op0 = TREE_OPERAND (expr, 0);
4642
4643 /* If we are converting back to the original type, we can just
4644 lift the input conversion. This is a common occurrence with
4645 switches back-and-forth amongst type variants. */
4646 if (type == TREE_TYPE (op0))
4647 return op0;
4648
4649 /* Otherwise, if we're converting between two aggregate or vector
4650 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4651 target type in place or to just convert the inner expression. */
4652 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4653 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4654 {
4655 /* If we are converting between mere variants, we can just
4656 substitute the VIEW_CONVERT_EXPR in place. */
4657 if (gnat_types_compatible_p (type, etype))
4658 return build1 (VIEW_CONVERT_EXPR, type, op0);
4659
4660 /* Otherwise, we may just bypass the input view conversion unless
4661 one of the types is a fat pointer, which is handled by the
4662 specialized code below that relies on exact type matching. */
4663 else if (!TYPE_IS_FAT_POINTER_P (type)
4664 && !TYPE_IS_FAT_POINTER_P (etype))
4665 return convert (type, op0);
4666 }
4667
4668 break;
4669 }
4670
4671 default:
4672 break;
4673 }
4674
4675 /* Check for converting to a pointer to an unconstrained array. */
4676 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4677 return convert_to_fat_pointer (type, expr);
4678
4679 /* If we are converting between two aggregate or vector types that are mere
4680 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4681 to a vector type from its representative array type. */
4682 else if ((code == ecode
4683 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4684 && gnat_types_compatible_p (type, etype))
4685 || (code == VECTOR_TYPE
4686 && ecode == ARRAY_TYPE
4687 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4688 etype)))
4689 return build1 (VIEW_CONVERT_EXPR, type, expr);
4690
4691 /* If we are converting between tagged types, try to upcast properly.
4692 But don't do it if we are just annotating types since tagged types
4693 aren't fully laid out in this mode. */
4694 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4695 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
4696 && !type_annotate_only)
4697 {
4698 tree child_etype = etype;
4699 do {
4700 tree field = TYPE_FIELDS (child_etype);
4701 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4702 return build_component_ref (expr, field, false);
4703 child_etype = TREE_TYPE (field);
4704 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4705 }
4706
4707 /* If we are converting from a smaller form of record type back to it, just
4708 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4709 size on both sides. */
4710 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4711 && smaller_form_type_p (etype, type))
4712 {
4713 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4714 false, false, false, true),
4715 expr);
4716 return build1 (VIEW_CONVERT_EXPR, type, expr);
4717 }
4718
4719 /* In all other cases of related types, make a NOP_EXPR. */
4720 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4721 return fold_convert (type, expr);
4722
4723 switch (code)
4724 {
4725 case VOID_TYPE:
4726 return fold_build1 (CONVERT_EXPR, type, expr);
4727
4728 case INTEGER_TYPE:
4729 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4730 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4731 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4732 return unchecked_convert (type, expr, false);
4733
4734 /* If the output is a biased type, convert first to the base type and
4735 subtract the bias. Note that the bias itself must go through a full
4736 conversion to the base type, lest it be a biased value; this happens
4737 for subtypes of biased types. */
4738 if (TYPE_BIASED_REPRESENTATION_P (type))
4739 return fold_convert (type,
4740 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4741 convert (TREE_TYPE (type), expr),
4742 convert (TREE_TYPE (type),
4743 TYPE_MIN_VALUE (type))));
4744
4745 /* ... fall through ... */
4746
4747 case ENUMERAL_TYPE:
4748 case BOOLEAN_TYPE:
4749 /* If we are converting an additive expression to an integer type
4750 with lower precision, be wary of the optimization that can be
4751 applied by convert_to_integer. There are 2 problematic cases:
4752 - if the first operand was originally of a biased type,
4753 because we could be recursively called to convert it
4754 to an intermediate type and thus rematerialize the
4755 additive operator endlessly,
4756 - if the expression contains a placeholder, because an
4757 intermediate conversion that changes the sign could
4758 be inserted and thus introduce an artificial overflow
4759 at compile time when the placeholder is substituted. */
4760 if (code == INTEGER_TYPE
4761 && ecode == INTEGER_TYPE
4762 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4763 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4764 {
4765 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4766
4767 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4768 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4769 || CONTAINS_PLACEHOLDER_P (expr))
4770 return build1 (NOP_EXPR, type, expr);
4771 }
4772
4773 return fold (convert_to_integer (type, expr));
4774
4775 case POINTER_TYPE:
4776 case REFERENCE_TYPE:
4777 /* If converting between two thin pointers, adjust if needed to account
4778 for differing offsets from the base pointer, depending on whether
4779 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
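/* E.g. (hypothetical figures): if the source record type has an attached
TYPE_UNCONSTRAINED_ARRAY with its array field 8 bytes into the record,
while the target record type has none, then BYTE_DIFF below is -8 and
the converted pointer is the input pointer shifted back by 8 bytes. */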
4780 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4781 {
4782 tree etype_pos
4783 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4784 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4785 : size_zero_node;
4786 tree type_pos
4787 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4788 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4789 : size_zero_node;
4790 tree byte_diff = size_diffop (type_pos, etype_pos);
4791
4792 expr = build1 (NOP_EXPR, type, expr);
4793 if (integer_zerop (byte_diff))
4794 return expr;
4795
4796 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4797 fold_convert (sizetype, byte_diff));
4798 }
4799
4800 /* If converting fat pointer to normal or thin pointer, get the pointer
4801 to the array and then convert it. */
4802 if (TYPE_IS_FAT_POINTER_P (etype))
4803 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4804
4805 return fold (convert_to_pointer (type, expr));
4806
4807 case REAL_TYPE:
4808 return fold (convert_to_real (type, expr));
4809
4810 case RECORD_TYPE:
4811 /* Do a normal conversion between scalar and justified modular type. */
4812 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4813 {
4814 vec<constructor_elt, va_gc> *v;
4815 vec_alloc (v, 1);
4816
4817 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4818 convert (TREE_TYPE (TYPE_FIELDS (type)),
4819 expr));
4820 return gnat_build_constructor (type, v);
4821 }
4822
4823 /* In these cases, assume the front-end has validated the conversion.
4824 If the conversion is valid, it will be a bit-wise conversion, so
4825 it can be viewed as an unchecked conversion. */
4826 return unchecked_convert (type, expr, false);
4827
4828 case ARRAY_TYPE:
4829 /* Do a normal conversion between unconstrained and constrained array
4830 type, assuming the latter is a constrained version of the former. */
4831 if (TREE_CODE (expr) == INDIRECT_REF
4832 && ecode == ARRAY_TYPE
4833 && TREE_TYPE (etype) == TREE_TYPE (type))
4834 {
4835 tree ptr_type = build_pointer_type (type);
4836 tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
4837 fold_convert (ptr_type,
4838 TREE_OPERAND (expr, 0)));
4839 TREE_READONLY (t) = TREE_READONLY (expr);
4840 TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
4841 return t;
4842 }
4843
4844 /* In these cases, assume the front-end has validated the conversion.
4845 If the conversion is valid, it will be a bit-wise conversion, so
4846 it can be viewed as an unchecked conversion. */
4847 return unchecked_convert (type, expr, false);
4848
4849 case UNION_TYPE:
4850 /* This is either a conversion between a tagged type and some
4851 subtype, which we have to mark as a UNION_TYPE because of
4852 overlapping fields, or a conversion of an Unchecked_Union. */
4853 return unchecked_convert (type, expr, false);
4854
4855 case UNCONSTRAINED_ARRAY_TYPE:
4856 /* If the input is a VECTOR_TYPE, convert to the representative
4857 array type first. */
4858 if (ecode == VECTOR_TYPE)
4859 {
4860 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4861 etype = TREE_TYPE (expr);
4862 ecode = TREE_CODE (etype);
4863 }
4864
4865 /* If EXPR is a constrained array, take its address, convert it to a
4866 fat pointer, and then dereference it. Likewise if EXPR is a
4867 record containing both a template and a constrained array.
4868 Note that a record representing a justified modular type
4869 always represents a packed constrained array. */
4870 if (ecode == ARRAY_TYPE
4871 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4872 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4873 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4874 return
4875 build_unary_op
4876 (INDIRECT_REF, NULL_TREE,
4877 convert_to_fat_pointer (TREE_TYPE (type),
4878 build_unary_op (ADDR_EXPR,
4879 NULL_TREE, expr)));
4880
4881 /* Do something very similar for converting one unconstrained
4882 array to another. */
4883 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4884 return
4885 build_unary_op (INDIRECT_REF, NULL_TREE,
4886 convert (TREE_TYPE (type),
4887 build_unary_op (ADDR_EXPR,
4888 NULL_TREE, expr)));
4889 else
4890 gcc_unreachable ();
4891
4892 case COMPLEX_TYPE:
4893 return fold (convert_to_complex (type, expr));
4894
4895 default:
4896 gcc_unreachable ();
4897 }
4898 }
4899
4900 /* Create an expression whose value is that of EXPR converted to the common
4901 index type, which is sizetype. EXPR is supposed to be in the base type
4902 of the GNAT index type. Calling it is equivalent to doing
4903
4904 convert (sizetype, expr)
4905
4906 but we try to distribute the type conversion with the knowledge that EXPR
4907 cannot overflow in its type. This is a best-effort approach and we fall
4908 back to the above expression as soon as difficulties are encountered.
4909
4910 This is necessary to overcome issues that arise when the GNAT base index
4911 type and the GCC common index type (sizetype) don't have the same size,
4912 which is quite frequent on 64-bit architectures. In this case, and if
4913 the GNAT base index type is signed but the iteration type of the loop has
4914 been forced to unsigned, the loop scalar evolution engine cannot compute
4915 a simple evolution for the general induction variables associated with the
4916 array indices, because it will preserve the wrap-around semantics in the
4917 unsigned type of their "inner" part. As a result, many loop optimizations
4918 are blocked.
4919
4920 The solution is to use a special (basic) induction variable that is at
4921 least as large as sizetype, and to express the aforementioned general
4922 induction variables in terms of this induction variable, eliminating
4923 the problematic intermediate truncation to the GNAT base index type.
4924 This is possible as long as the original expression doesn't overflow
4925 and if the middle-end hasn't introduced artificial overflows in the
4926 course of the various simplifications it can make to the expression. */
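/* As a hypothetical illustration: on a 64-bit target where the GNAT base
index type is a 32-bit signed integer, a loop like

for J in A'Range loop
S := S + A (J);
end loop;

computes the address of A (J) from an expression involving J converted
to sizetype; rewriting J in terms of the 64-bit special induction
variable removes the intermediate truncation to 32 bits that would
otherwise keep the scalar evolution engine from recognizing a simple
evolution for the address computation. */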
4927
4928 tree
4929 convert_to_index_type (tree expr)
4930 {
4931 enum tree_code code = TREE_CODE (expr);
4932 tree type = TREE_TYPE (expr);
4933
4934 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4935 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4936 if (TYPE_UNSIGNED (type) || !optimize)
4937 return convert (sizetype, expr);
4938
4939 switch (code)
4940 {
4941 case VAR_DECL:
4942 /* The main effect of the function: replace a loop parameter with its
4943 associated special induction variable. */
4944 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4945 expr = DECL_INDUCTION_VAR (expr);
4946 break;
4947
4948 CASE_CONVERT:
4949 {
4950 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4951 /* Bail out as soon as we suspect some sort of type frobbing. */
4952 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4953 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4954 break;
4955 }
4956
4957 /* ... fall through ... */
4958
4959 case NON_LVALUE_EXPR:
4960 return fold_build1 (code, sizetype,
4961 convert_to_index_type (TREE_OPERAND (expr, 0)));
4962
4963 case PLUS_EXPR:
4964 case MINUS_EXPR:
4965 case MULT_EXPR:
4966 return fold_build2 (code, sizetype,
4967 convert_to_index_type (TREE_OPERAND (expr, 0)),
4968 convert_to_index_type (TREE_OPERAND (expr, 1)));
4969
4970 case COMPOUND_EXPR:
4971 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4972 convert_to_index_type (TREE_OPERAND (expr, 1)));
4973
4974 case COND_EXPR:
4975 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4976 convert_to_index_type (TREE_OPERAND (expr, 1)),
4977 convert_to_index_type (TREE_OPERAND (expr, 2)));
4978
4979 default:
4980 break;
4981 }
4982
4983 return convert (sizetype, expr);
4984 }
4985 \f
4986 /* Remove all conversions that are done in EXP. This includes converting
4987 from a padded type or to a justified modular type. If TRUE_ADDRESS
4988 is true, always return the address of the containing object even if
4989 the address is not bit-aligned. */
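/* For instance (a hypothetical tree shape, for illustration only), given

NOP_EXPR <VIEW_CONVERT_EXPR <COMPONENT_REF <object of padded type>>>

the two conversions are stripped and the COMPONENT_REF, which fetches
the lone field of the padded object, is replaced by the padded object
itself. */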
4990
4991 tree
4992 remove_conversions (tree exp, bool true_address)
4993 {
4994 switch (TREE_CODE (exp))
4995 {
4996 case CONSTRUCTOR:
4997 if (true_address
4998 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4999 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5000 return
5001 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
5002 break;
5003
5004 case COMPONENT_REF:
5005 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5006 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5007 break;
5008
5009 CASE_CONVERT:
5010 case VIEW_CONVERT_EXPR:
5011 case NON_LVALUE_EXPR:
5012 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5013
5014 default:
5015 break;
5016 }
5017
5018 return exp;
5019 }
5020 \f
5021 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5022 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5023 likewise return an expression pointing to the underlying array. */
5024
5025 tree
5026 maybe_unconstrained_array (tree exp)
5027 {
5028 enum tree_code code = TREE_CODE (exp);
5029 tree type = TREE_TYPE (exp);
5030
5031 switch (TREE_CODE (type))
5032 {
5033 case UNCONSTRAINED_ARRAY_TYPE:
5034 if (code == UNCONSTRAINED_ARRAY_REF)
5035 {
5036 const bool read_only = TREE_READONLY (exp);
5037 const bool no_trap = TREE_THIS_NOTRAP (exp);
5038
5039 exp = TREE_OPERAND (exp, 0);
5040 type = TREE_TYPE (exp);
5041
5042 if (TREE_CODE (exp) == COND_EXPR)
5043 {
5044 tree op1
5045 = build_unary_op (INDIRECT_REF, NULL_TREE,
5046 build_component_ref (TREE_OPERAND (exp, 1),
5047 TYPE_FIELDS (type),
5048 false));
5049 tree op2
5050 = build_unary_op (INDIRECT_REF, NULL_TREE,
5051 build_component_ref (TREE_OPERAND (exp, 2),
5052 TYPE_FIELDS (type),
5053 false));
5054
5055 exp = build3 (COND_EXPR,
5056 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
5057 TREE_OPERAND (exp, 0), op1, op2);
5058 }
5059 else
5060 {
5061 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5062 build_component_ref (exp,
5063 TYPE_FIELDS (type),
5064 false));
5065 TREE_READONLY (exp) = read_only;
5066 TREE_THIS_NOTRAP (exp) = no_trap;
5067 }
5068 }
5069
5070 else if (code == NULL_EXPR)
5071 exp = build1 (NULL_EXPR,
5072 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5073 TREE_OPERAND (exp, 0));
5074 break;
5075
5076 case RECORD_TYPE:
5077 /* If this is a padded type and it contains a template, convert to the
5078 unpadded type first. */
5079 if (TYPE_PADDING_P (type)
5080 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5081 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5082 {
5083 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5084 code = TREE_CODE (exp);
5085 type = TREE_TYPE (exp);
5086 }
5087
5088 if (TYPE_CONTAINS_TEMPLATE_P (type))
5089 {
5090 /* If the array initializer is a box, return NULL_TREE. */
5091 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
5092 return NULL_TREE;
5093
5094 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
5095 false);
5096 type = TREE_TYPE (exp);
5097
5098 /* If the array type is padded, convert to the unpadded type. */
5099 if (TYPE_IS_PADDING_P (type))
5100 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5101 }
5102 break;
5103
5104 default:
5105 break;
5106 }
5107
5108 return exp;
5109 }
5110 \f
5111 /* Return true if EXPR is an expression that can be folded as an operand
5112 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5113
5114 static bool
5115 can_fold_for_view_convert_p (tree expr)
5116 {
5117 tree t1, t2;
5118
5119 /* The folder will fold NOP_EXPRs between integral types with the same
5120 precision (in the middle-end's sense). We cannot allow it if the
5121 types don't have the same precision in the Ada sense as well. */
5122 if (TREE_CODE (expr) != NOP_EXPR)
5123 return true;
5124
5125 t1 = TREE_TYPE (expr);
5126 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5127
5128 /* Defer to the folder for non-integral conversions. */
5129 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5130 return true;
5131
5132 /* Only fold conversions that preserve both precisions. */
5133 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5134 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5135 return true;
5136
5137 return false;
5138 }
5139
5140 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5141 If NOTRUNC_P is true, truncation operations should be suppressed.
5142
5143 Special care is required with (source or target) integral types whose
5144 precision is not equal to their size, to make sure we fetch or assign
5145 the value bits whose location might depend on the endianness, e.g.
5146
5147 Rmsize : constant := 8;
5148 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5149
5150 type Bit_Array is array (1 .. Rmsize) of Boolean;
5151 pragma Pack (Bit_Array);
5152
5153 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5154
5155 Value : Int := 2#1000_0001#;
5156 Vbits : Bit_Array := To_Bit_Array (Value);
5157
5158 we expect the 8 bits at Vbits'Address to always contain Value, while
5159 their original location depends on the endianness, at Value'Address
5160 on a little-endian architecture but not on a big-endian one.
5161
5162 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5163 the bits between the precision and the size are filled, because of the
5164 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5165 So we use the special predicate type_unsigned_for_rm above. */
5166
5167 tree
5168 unchecked_convert (tree type, tree expr, bool notrunc_p)
5169 {
5170 tree etype = TREE_TYPE (expr);
5171 enum tree_code ecode = TREE_CODE (etype);
5172 enum tree_code code = TREE_CODE (type);
5173 const bool ebiased
5174 = (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype));
5175 const bool biased
5176 = (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type));
5177 const bool ereverse
5178 = (AGGREGATE_TYPE_P (etype) && TYPE_REVERSE_STORAGE_ORDER (etype));
5179 const bool reverse
5180 = (AGGREGATE_TYPE_P (type) && TYPE_REVERSE_STORAGE_ORDER (type));
5181 tree tem;
5182 int c = 0;
5183
5184 /* If the expression is already of the right type, we are done. */
5185 if (etype == type)
5186 return expr;
5187
5188 /* If both types are integral just do a normal conversion.
5189 Likewise for a conversion to an unconstrained array. */
5190 if (((INTEGRAL_TYPE_P (type)
5191 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5192 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5193 && (INTEGRAL_TYPE_P (etype)
5194 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5195 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5196 || code == UNCONSTRAINED_ARRAY_TYPE)
5197 {
5198 if (ebiased)
5199 {
5200 tree ntype = copy_type (etype);
5201 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5202 TYPE_MAIN_VARIANT (ntype) = ntype;
5203 expr = build1 (NOP_EXPR, ntype, expr);
5204 }
5205
5206 if (biased)
5207 {
5208 tree rtype = copy_type (type);
5209 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5210 TYPE_MAIN_VARIANT (rtype) = rtype;
5211 expr = convert (rtype, expr);
5212 expr = build1 (NOP_EXPR, type, expr);
5213 }
5214 else
5215 expr = convert (type, expr);
5216 }
5217
5218 /* If we are converting to an integral type whose precision is not equal
5219 to its size, first unchecked convert to a record type that contains a
5220 field of the given precision. Then extract the result from the field.
5221
5222 There is a subtlety if the source type is an aggregate type with reverse
5223 storage order because its representation is not contiguous in the native
5224 storage order, i.e. a direct unchecked conversion to an integral type
5225 with N bits of precision cannot read the first N bits of the aggregate
5226 type. To overcome it, we do an unchecked conversion to an integral type
5227 with reverse storage order and return the resulting value. This also
5228 ensures that the result of the unchecked conversion doesn't depend on
5229 the endianness of the target machine, but only on the storage order of
5230 the aggregate type.
5231
5232 Finally, for the sake of consistency, we do the unchecked conversion
5233 to an integral type with reverse storage order as soon as the source
5234 type is an aggregate type with reverse storage order, even if there
5235 are no considerations of precision or size involved. Ultimately, we
5236 further extend this processing to any scalar type. */
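/* For instance (hypothetical figures): for a target integral type whose
RM size is 24 bits but whose size is 32 bits, we build

record { OBJ : <24-bit integral field at bit position 0> }

unchecked convert EXPR to this record type, extract the OBJ field and
convert the result to the target type. */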
5237 else if ((INTEGRAL_TYPE_P (type)
5238 && TYPE_RM_SIZE (type)
5239 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (type),
5240 TYPE_SIZE (type))) < 0
5241 || ereverse))
5242 || (SCALAR_FLOAT_TYPE_P (type) && ereverse))
5243 {
5244 tree rec_type = make_node (RECORD_TYPE);
5245 tree field_type, field;
5246
5247 TYPE_REVERSE_STORAGE_ORDER (rec_type) = ereverse;
5248
5249 if (c < 0)
5250 {
5251 const unsigned HOST_WIDE_INT prec
5252 = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5253 if (type_unsigned_for_rm (type))
5254 field_type = make_unsigned_type (prec);
5255 else
5256 field_type = make_signed_type (prec);
5257 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5258 }
5259 else
5260 field_type = type;
5261
5262 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5263 NULL_TREE, bitsize_zero_node, c < 0, 0);
5264
5265 finish_record_type (rec_type, field, 1, false);
5266
5267 expr = unchecked_convert (rec_type, expr, notrunc_p);
5268 expr = build_component_ref (expr, field, false);
5269 expr = fold_build1 (NOP_EXPR, type, expr);
5270 }
5271
5272 /* Similarly if we are converting from an integral type whose precision is
5273 not equal to its size, first copy into a field of the given precision
5274 and unchecked convert the record type.
5275
5276 The same considerations as above apply if the target type is an aggregate
5277 type with reverse storage order and we also proceed similarly. */
5278 else if ((INTEGRAL_TYPE_P (etype)
5279 && TYPE_RM_SIZE (etype)
5280 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (etype),
5281 TYPE_SIZE (etype))) < 0
5282 || reverse))
5283 || (SCALAR_FLOAT_TYPE_P (etype) && reverse))
5284 {
5285 tree rec_type = make_node (RECORD_TYPE);
5286 vec<constructor_elt, va_gc> *v;
5287 vec_alloc (v, 1);
5288 tree field_type, field;
5289
5290 TYPE_REVERSE_STORAGE_ORDER (rec_type) = reverse;
5291
5292 if (c < 0)
5293 {
5294 const unsigned HOST_WIDE_INT prec
5295 = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5296 if (type_unsigned_for_rm (etype))
5297 field_type = make_unsigned_type (prec);
5298 else
5299 field_type = make_signed_type (prec);
5300 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5301 }
5302 else
5303 field_type = etype;
5304
5305 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5306 NULL_TREE, bitsize_zero_node, c < 0, 0);
5307
5308 finish_record_type (rec_type, field, 1, false);
5309
5310 expr = fold_build1 (NOP_EXPR, field_type, expr);
5311 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5312 expr = gnat_build_constructor (rec_type, v);
5313 expr = unchecked_convert (type, expr, notrunc_p);
5314 }
5315
5316 /* If we are converting from a scalar type to a type with a different size,
5317 we need to pad to have the same size on both sides.
5318
5319 ??? We cannot do it unconditionally because unchecked conversions are
5320 used liberally by the front-end to implement polymorphism, e.g. in:
5321
5322 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5323 return p___size__4 (p__object!(S191s.all));
5324
5325 so we skip all expressions that are references. */
5326 else if (!REFERENCE_CLASS_P (expr)
5327 && !AGGREGATE_TYPE_P (etype)
5328 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5329 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5330 {
5331 if (c < 0)
5332 {
5333 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5334 false, false, false, true),
5335 expr);
5336 expr = unchecked_convert (type, expr, notrunc_p);
5337 }
5338 else
5339 {
5340 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5341 false, false, false, true);
5342 expr = unchecked_convert (rec_type, expr, notrunc_p);
5343 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5344 }
5345 }
5346
5347 /* We have a special case when we are converting between two unconstrained
5348 array types. In that case, take the address, convert the fat pointer
5349 types, and dereference. */
5350 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5351 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5352 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5353 build_unary_op (ADDR_EXPR, NULL_TREE,
5354 expr)));
5355
5356 /* Another special case is when we are converting to a vector type from its
5357 representative array type; this is a regular conversion. */
5358 else if (code == VECTOR_TYPE
5359 && ecode == ARRAY_TYPE
5360 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5361 etype))
5362 expr = convert (type, expr);
5363
5364 /* And, if the array type is not the representative, we try to build an
5365 intermediate vector type of which the array type is the representative
5366 and to do the unchecked conversion between the vector types, in order
5367 to enable further simplifications in the middle-end. */
5368 else if (code == VECTOR_TYPE
5369 && ecode == ARRAY_TYPE
5370 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5371 {
5372 expr = convert (tem, expr);
5373 return unchecked_convert (type, expr, notrunc_p);
5374 }
5375
5376 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5377 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5378 else if (TREE_CODE (expr) == CONSTRUCTOR
5379 && code == RECORD_TYPE
5380 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5381 {
5382 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5383 Empty, false, false, false, true),
5384 expr);
5385 return unchecked_convert (type, expr, notrunc_p);
5386 }
5387
5388 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5389 else
5390 {
5391 expr = maybe_unconstrained_array (expr);
5392 etype = TREE_TYPE (expr);
5393 ecode = TREE_CODE (etype);
5394 if (can_fold_for_view_convert_p (expr))
5395 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5396 else
5397 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5398 }
5399
5400 /* If the result is a non-biased integral type whose precision is not equal
5401 to its size, sign- or zero-extend the result. But we need not do this
5402 if the input is also an integral type and both are unsigned or both are
5403 signed and have the same precision. */
5404 tree type_rm_size;
5405 if (!notrunc_p
5406 && !biased
5407 && INTEGRAL_TYPE_P (type)
5408 && (type_rm_size = TYPE_RM_SIZE (type))
5409 && tree_int_cst_compare (type_rm_size, TYPE_SIZE (type)) < 0
5410 && !(INTEGRAL_TYPE_P (etype)
5411 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5412 && (type_unsigned_for_rm (type)
5413 || tree_int_cst_compare (type_rm_size,
5414 TYPE_RM_SIZE (etype)
5415 ? TYPE_RM_SIZE (etype)
5416 : TYPE_SIZE (etype)) == 0)))
5417 {
5418 if (integer_zerop (type_rm_size))
5419 expr = build_int_cst (type, 0);
5420 else
5421 {
5422 tree base_type
5423 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5424 type_unsigned_for_rm (type));
5425 tree shift_expr
5426 = convert (base_type,
5427 size_binop (MINUS_EXPR,
5428 TYPE_SIZE (type), type_rm_size));
5429 expr
5430 = convert (type,
5431 build_binary_op (RSHIFT_EXPR, base_type,
5432 build_binary_op (LSHIFT_EXPR, base_type,
5433 convert (base_type,
5434 expr),
5435 shift_expr),
5436 shift_expr));
5437 }
5438 }
5439
5440 /* An unchecked conversion should never raise Constraint_Error. The code
5441 below assumes that GCC's conversion routines overflow the same way that
5442 the underlying hardware does. This is probably true. In the rare case
5443 when it is false, we can rely on the fact that such conversions are
5444 erroneous anyway. */
5445 if (TREE_CODE (expr) == INTEGER_CST)
5446 TREE_OVERFLOW (expr) = 0;
5447
5448 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5449 mark the result as no longer constant. */
5450 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5451 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5452 OEP_ONLY_CONST))
5453 TREE_CONSTANT (expr) = 0;
5454
5455 return expr;
5456 }
5457 \f
5458 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5459 the latter being a record type as predicated by Is_Record_Type. */
5460
5461 enum tree_code
5462 tree_code_for_record_type (Entity_Id gnat_type)
5463 {
5464 Node_Id component_list, component;
5465
5466 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5467 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5468 if (!Is_Unchecked_Union (gnat_type))
5469 return RECORD_TYPE;
5470
5471 gnat_type = Implementation_Base_Type (gnat_type);
5472 component_list
5473 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5474
5475 for (component = First_Non_Pragma (Component_Items (component_list));
5476 Present (component);
5477 component = Next_Non_Pragma (component))
5478 if (Ekind (Defining_Entity (component)) == E_Component)
5479 return RECORD_TYPE;
5480
5481 return UNION_TYPE;
5482 }
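/* As a hypothetical illustration of the test above, an Ada type like

type UU (D : Boolean := False) is record
case D is
when False => X : Integer;
when True => Y : Float;
end case;
end record;
pragma Unchecked_Union (UU);

has no non-discriminant component outside the variant part, so it is
mapped to a UNION_TYPE; declaring a component ahead of the variant part
would instead yield a RECORD_TYPE. */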
5483
5484 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5485 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5486 according to the presence of an alignment clause on the type or, if it
5487 is an array, on the component type. */
5488
5489 bool
5490 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5491 {
5492 gnat_type = Underlying_Type (gnat_type);
5493
5494 *align_clause = Present (Alignment_Clause (gnat_type));
5495
5496 if (Is_Array_Type (gnat_type))
5497 {
5498 gnat_type = Underlying_Type (Component_Type (gnat_type));
5499 if (Present (Alignment_Clause (gnat_type)))
5500 *align_clause = true;
5501 }
5502
5503 if (!Is_Floating_Point_Type (gnat_type))
5504 return false;
5505
5506 if (UI_To_Int (Esize (gnat_type)) != 64)
5507 return false;
5508
5509 return true;
5510 }
5511
5512 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5513 size is greater or equal to 64 bits, or an array of such a type. Set
5514 ALIGN_CLAUSE according to the presence of an alignment clause on the
5515 type or, if it is an array, on the component type. */
5516
5517 bool
5518 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5519 {
5520 gnat_type = Underlying_Type (gnat_type);
5521
5522 *align_clause = Present (Alignment_Clause (gnat_type));
5523
5524 if (Is_Array_Type (gnat_type))
5525 {
5526 gnat_type = Underlying_Type (Component_Type (gnat_type));
5527 if (Present (Alignment_Clause (gnat_type)))
5528 *align_clause = true;
5529 }
5530
5531 if (!Is_Scalar_Type (gnat_type))
5532 return false;
5533
5534 if (UI_To_Int (Esize (gnat_type)) < 64)
5535 return false;
5536
5537 return true;
5538 }
5539
5540 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5541 component of an aggregate type. */
5542
5543 bool
5544 type_for_nonaliased_component_p (tree gnu_type)
5545 {
5546 /* If the type is passed by reference, we may have pointers to the
5547 component so it cannot be made non-aliased. */
5548 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5549 return false;
5550
5551 /* We used to say that any component of aggregate type is aliased
5552 because the front-end may take 'Reference of it. The front-end
5553 has been enhanced in the meantime so as to use a renaming instead
5554 in most cases, but the back-end can probably take the address of
5555 such a component too so we go for the conservative stance.
5556
5557 For instance, we might need the address of any array type, even
5558 if normally passed by copy, to construct a fat pointer if the
5559 component is used as an actual for an unconstrained formal.
5560
5561 Likewise for record types: even if a specific record subtype is
5562 passed by copy, the parent type might be passed by ref (e.g. if
5563 it's of variable size) and we might take the address of a child
5564 component to pass to a parent formal. We have no way to check
5565 for such conditions here. */
5566 if (AGGREGATE_TYPE_P (gnu_type))
5567 return false;
5568
5569 return true;
5570 }
5571
5572 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5573
5574 bool
5575 smaller_form_type_p (tree type, tree orig_type)
5576 {
5577 tree size, osize;
5578
5579 /* We're not interested in variants here. */
5580 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5581 return false;
5582
5583 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5584 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5585 return false;
5586
5587 size = TYPE_SIZE (type);
5588 osize = TYPE_SIZE (orig_type);
5589
5590 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5591 return false;
5592
5593 return tree_int_cst_lt (size, osize) != 0;
5594 }
5595
5596 /* Return whether EXPR, which is the renamed object in an object renaming
5597 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5598 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5599
5600 bool
5601 can_materialize_object_renaming_p (Node_Id expr)
5602 {
5603 while (true)
5604 {
5605 expr = Original_Node (expr);
5606
5607 switch (Nkind (expr))
5608 {
5609 case N_Identifier:
5610 case N_Expanded_Name:
5611 if (!Present (Renamed_Object (Entity (expr))))
5612 return true;
5613 expr = Renamed_Object (Entity (expr));
5614 break;
5615
5616 case N_Selected_Component:
5617 {
5618 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5619 return false;
5620
5621 const Uint bitpos
5622 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5623 if (!UI_Is_In_Int_Range (bitpos)
5624 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5625 return false;
5626
5627 expr = Prefix (expr);
5628 break;
5629 }
5630
5631 case N_Indexed_Component:
5632 case N_Slice:
5633 {
5634 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5635
5636 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5637 return false;
5638
5639 expr = Prefix (expr);
5640 break;
5641 }
5642
5643 case N_Explicit_Dereference:
5644 expr = Prefix (expr);
5645 break;
5646
5647 default:
5648 return true;
5649 }
5650 }
5651 }
5652
5653 /* Perform final processing on global declarations. */
5654
5655 static GTY (()) tree dummy_global;
5656
5657 void
5658 gnat_write_global_declarations (void)
5659 {
5660 unsigned int i;
5661 tree iter;
5662
5663 /* If we have declared types as used at the global level, insert them in
5664 the global hash table. We use a dummy variable for this purpose, but
5665 we need to build it unconditionally to avoid -fcompare-debug issues. */
5666 if (first_global_object_name)
5667 {
5668 struct varpool_node *node;
5669 char *label;
5670
5671 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5672 dummy_global
5673 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5674 void_type_node);
5675 DECL_HARD_REGISTER (dummy_global) = 1;
5676 TREE_STATIC (dummy_global) = 1;
5677 node = varpool_node::get_create (dummy_global);
5678 node->definition = 1;
5679 node->force_output = 1;
5680
5681 if (types_used_by_cur_var_decl)
5682 while (!types_used_by_cur_var_decl->is_empty ())
5683 {
5684 tree t = types_used_by_cur_var_decl->pop ();
5685 types_used_by_var_decl_insert (t, dummy_global);
5686 }
5687 }
5688
5689 /* Output debug information for all global type declarations first. This
5690 ensures that global types whose compilation hasn't been finalized yet,
5691 for example pointers to Taft amendment types, have their compilation
5692 finalized in the right context. */
5693 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5694 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5695 debug_hooks->type_decl (iter, false);
5696
5697 /* Output imported functions. */
5698 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5699 if (TREE_CODE (iter) == FUNCTION_DECL
5700 && DECL_EXTERNAL (iter)
5701 && DECL_INITIAL (iter) == NULL
5702 && !DECL_IGNORED_P (iter)
5703 && DECL_FUNCTION_IS_DEF (iter))
5704 debug_hooks->early_global_decl (iter);
5705
5706 /* Then output the global variables. We need to do that after the debug
5707 information for global types is emitted so that they are finalized. Skip
5708 external global variables, unless we need to emit debug info for them:
5709 this is useful for imported variables, for instance. */
5710 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5711 if (TREE_CODE (iter) == VAR_DECL
5712 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5713 rest_of_decl_compilation (iter, true, 0);
5714
5715 /* Output the imported modules/declarations. In GNAT, these only
5716 materialize subprograms. */
5717 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5718 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5719 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5720 DECL_CONTEXT (iter), false, false);
5721 }
5722
5723 /* ************************************************************************
5724 * * GCC builtins support *
5725 * ************************************************************************ */
5726
5727 /* The general scheme is fairly simple:
5728
5729 For each builtin function/type to be declared, gnat_install_builtins calls
5730 internal facilities which eventually get to gnat_pushdecl, which in turn
5731 records the builtin function decls so declared in the 'builtin_decls'
5732 global data structure. When an Intrinsic subprogram declaration is
5733 processed, we search this global data structure to retrieve the
5734 associated BUILT_IN DECL node. */
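/* For instance (hypothetical usage, for illustration): an Ada subprogram
imported as an intrinsic, e.g.

pragma Import (Intrinsic, Memory_Copy, "__builtin_memcpy");

leads to a lookup along the lines of

tree decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));

to retrieve the BUILT_IN DECL node declared by gnat_install_builtins. */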
5735
5736 /* Search the chain of currently available builtin declarations for a node
5737 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5738 found, if any, or NULL_TREE otherwise. */
5739 tree
5740 builtin_decl_for (tree name)
5741 {
5742 unsigned i;
5743 tree decl;
5744
5745 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5746 if (DECL_NAME (decl) == name)
5747 return decl;
5748
5749 return NULL_TREE;
5750 }
5751
5752 /* The code below eventually exposes gnat_install_builtins, which declares
5753 the builtin types and functions we might need, either internally or as
5754 user accessible facilities.
5755
5756 ??? This is a first implementation shot, still in rough shape. It is
5757 heavily inspired by the "C" family implementation, with chunks copied
5758 verbatim from there.
5759
5760 Two obvious improvement candidates are:
5761 o Use a more efficient name/decl mapping scheme
5762 o Devise a middle-end infrastructure to avoid having to copy
5763 pieces between front-ends. */
5764
5765 /* ----------------------------------------------------------------------- *
5766 * BUILTIN ELEMENTARY TYPES *
5767 * ----------------------------------------------------------------------- */
5768
5769 /* Standard data types to be used in builtin argument declarations. */
5770
5771 enum c_tree_index
5772 {
5773 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5774 CTI_STRING_TYPE,
5775 CTI_CONST_STRING_TYPE,
5776
5777 CTI_MAX
5778 };
5779
5780 static tree c_global_trees[CTI_MAX];
5781
5782 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5783 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5784 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5785
5786 /* ??? In addition to some attribute handlers, we currently don't support
5787 a (small) number of builtin types, which in turn inhibits support for a
5788 number of builtin functions. */
5789 #define wint_type_node void_type_node
5790 #define intmax_type_node void_type_node
5791 #define uintmax_type_node void_type_node
5792
5793 /* Used to help initialize the builtin-types.def table. When a type of
5794 the correct size doesn't exist, use error_mark_node instead of NULL.
5795 The latter results in segfaults even when a decl using the type doesn't
5796 get invoked. */
5797
5798 static tree
5799 builtin_type_for_size (int size, bool unsignedp)
5800 {
5801 tree type = gnat_type_for_size (size, unsignedp);
5802 return type ? type : error_mark_node;
5803 }
5804
5805 /* Build/push the elementary type decls that builtin functions/types
5806 will need. */
5807
5808 static void
5809 install_builtin_elementary_types (void)
5810 {
5811 signed_size_type_node = gnat_signed_type_for (size_type_node);
5812 pid_type_node = integer_type_node;
5813
5814 string_type_node = build_pointer_type (char_type_node);
5815 const_string_type_node
5816 = build_pointer_type (build_qualified_type
5817 (char_type_node, TYPE_QUAL_CONST));
5818 }
5819
5820 /* ----------------------------------------------------------------------- *
5821 * BUILTIN FUNCTION TYPES *
5822 * ----------------------------------------------------------------------- */
5823
5824 /* Now, builtin function types per se. */
5825
5826 enum c_builtin_type
5827 {
5828 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5829 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5830 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5831 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5832 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5833 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5834 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5835 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5836 ARG6) NAME,
5837 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5838 ARG6, ARG7) NAME,
5839 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5840 ARG6, ARG7, ARG8) NAME,
5841 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5842 ARG6, ARG7, ARG8, ARG9) NAME,
5843 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5844 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5845 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5846 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5847 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5848 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5849 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5850 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5851 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5852 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5853 NAME,
5854 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5855 ARG6) NAME,
5856 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5857 ARG6, ARG7) NAME,
5858 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5859 #include "builtin-types.def"
5860 #undef DEF_PRIMITIVE_TYPE
5861 #undef DEF_FUNCTION_TYPE_0
5862 #undef DEF_FUNCTION_TYPE_1
5863 #undef DEF_FUNCTION_TYPE_2
5864 #undef DEF_FUNCTION_TYPE_3
5865 #undef DEF_FUNCTION_TYPE_4
5866 #undef DEF_FUNCTION_TYPE_5
5867 #undef DEF_FUNCTION_TYPE_6
5868 #undef DEF_FUNCTION_TYPE_7
5869 #undef DEF_FUNCTION_TYPE_8
5870 #undef DEF_FUNCTION_TYPE_9
5871 #undef DEF_FUNCTION_TYPE_10
5872 #undef DEF_FUNCTION_TYPE_11
5873 #undef DEF_FUNCTION_TYPE_VAR_0
5874 #undef DEF_FUNCTION_TYPE_VAR_1
5875 #undef DEF_FUNCTION_TYPE_VAR_2
5876 #undef DEF_FUNCTION_TYPE_VAR_3
5877 #undef DEF_FUNCTION_TYPE_VAR_4
5878 #undef DEF_FUNCTION_TYPE_VAR_5
5879 #undef DEF_FUNCTION_TYPE_VAR_6
5880 #undef DEF_FUNCTION_TYPE_VAR_7
5881 #undef DEF_POINTER_TYPE
5882 BT_LAST
5883 };
5884
5885 typedef enum c_builtin_type builtin_type;
5886
5887 /* A temporary array used in communication with def_fn_type. */
5888 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5889
5890 /* A helper function for install_builtin_function_types. Build the
5891 function type for DEF with return type RET and N arguments. If VAR is
5892 true, the function should be variadic after those N arguments.
5893
5894 Takes special care not to ICE if any of the types involved are
5895 error_mark_node, which indicates that said type is not in fact available
5896 (see builtin_type_for_size); in that case, the function type as a whole
5897 is set to error_mark_node. */
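/* For instance (a hypothetical builtin-types.def entry, for illustration),
an entry such as

DEF_FUNCTION_TYPE_1 (BT_FN_INT_PTR, BT_INT, BT_PTR)

expands, via the macros defined below, to

def_fn_type (BT_FN_INT_PTR, BT_INT, 0, 1, BT_PTR);

which stores the function type "int (void *)" into
builtin_types[BT_FN_INT_PTR]. */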
5898
5899 static void
5900 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5901 {
5902 tree t;
5903 tree *args = XALLOCAVEC (tree, n);
5904 va_list list;
5905 int i;
5906
5907 va_start (list, n);
5908 for (i = 0; i < n; ++i)
5909 {
5910 builtin_type a = (builtin_type) va_arg (list, int);
5911 t = builtin_types[a];
5912 if (t == error_mark_node)
5913 goto egress;
5914 args[i] = t;
5915 }
5916
5917 t = builtin_types[ret];
5918 if (t == error_mark_node)
5919 goto egress;
5920 if (var)
5921 t = build_varargs_function_type_array (t, n, args);
5922 else
5923 t = build_function_type_array (t, n, args);
5924
5925 egress:
5926 builtin_types[def] = t;
5927 va_end (list);
5928 }
5929
5930 /* Build the builtin function types and install them in the builtin_types
5931 array for later use in builtin function decls. */
5932
5933 static void
5934 install_builtin_function_types (void)
5935 {
5936 tree va_list_ref_type_node;
5937 tree va_list_arg_type_node;
5938
5939 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5940 {
5941 va_list_arg_type_node = va_list_ref_type_node =
5942 build_pointer_type (TREE_TYPE (va_list_type_node));
5943 }
5944 else
5945 {
5946 va_list_arg_type_node = va_list_type_node;
5947 va_list_ref_type_node = build_reference_type (va_list_type_node);
5948 }
5949
5950 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5951 builtin_types[ENUM] = VALUE;
5952 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5953 def_fn_type (ENUM, RETURN, 0, 0);
5954 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5955 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5956 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5957 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5958 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5959 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5960 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5961 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5962 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5963 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5964 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5965 ARG6) \
5966 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5967 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5968 ARG6, ARG7) \
5969 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5970 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5971 ARG6, ARG7, ARG8) \
5972 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5973 ARG7, ARG8);
5974 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5975 ARG6, ARG7, ARG8, ARG9) \
5976 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5977 ARG7, ARG8, ARG9);
5978 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5979 ARG6, ARG7, ARG8, ARG9, ARG10) \
5980 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5981 ARG7, ARG8, ARG9, ARG10);
5982 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5983 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5984 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5985 ARG7, ARG8, ARG9, ARG10, ARG11);
5986 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5987 def_fn_type (ENUM, RETURN, 1, 0);
5988 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5989 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5990 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5991 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5992 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5993 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5994 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5995 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5996 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5997 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5998 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5999 ARG6) \
6000 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6001 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6002 ARG6, ARG7) \
6003 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6004 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6005 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6006
6007 #include "builtin-types.def"
6008
6009 #undef DEF_PRIMITIVE_TYPE
6010 #undef DEF_FUNCTION_TYPE_0
6011 #undef DEF_FUNCTION_TYPE_1
6012 #undef DEF_FUNCTION_TYPE_2
6013 #undef DEF_FUNCTION_TYPE_3
6014 #undef DEF_FUNCTION_TYPE_4
6015 #undef DEF_FUNCTION_TYPE_5
6016 #undef DEF_FUNCTION_TYPE_6
6017 #undef DEF_FUNCTION_TYPE_7
6018 #undef DEF_FUNCTION_TYPE_8
6019 #undef DEF_FUNCTION_TYPE_9
6020 #undef DEF_FUNCTION_TYPE_10
6021 #undef DEF_FUNCTION_TYPE_11
6022 #undef DEF_FUNCTION_TYPE_VAR_0
6023 #undef DEF_FUNCTION_TYPE_VAR_1
6024 #undef DEF_FUNCTION_TYPE_VAR_2
6025 #undef DEF_FUNCTION_TYPE_VAR_3
6026 #undef DEF_FUNCTION_TYPE_VAR_4
6027 #undef DEF_FUNCTION_TYPE_VAR_5
6028 #undef DEF_FUNCTION_TYPE_VAR_6
6029 #undef DEF_FUNCTION_TYPE_VAR_7
6030 #undef DEF_POINTER_TYPE
6031 builtin_types[(int) BT_LAST] = NULL_TREE;
6032 }
6033
6034 /* ----------------------------------------------------------------------- *
6035 * BUILTIN ATTRIBUTES *
6036 * ----------------------------------------------------------------------- */
6037
6038 enum built_in_attribute
6039 {
6040 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
6041 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
6042 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
6043 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
6044 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
6045 #include "builtin-attrs.def"
6046 #undef DEF_ATTR_NULL_TREE
6047 #undef DEF_ATTR_INT
6048 #undef DEF_ATTR_STRING
6049 #undef DEF_ATTR_IDENT
6050 #undef DEF_ATTR_TREE_LIST
6051 ATTR_LAST
6052 };
6053
6054 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
6055
6056 static void
6057 install_builtin_attributes (void)
6058 {
6059 /* Fill in the built_in_attributes array. */
6060 #define DEF_ATTR_NULL_TREE(ENUM) \
6061 built_in_attributes[(int) ENUM] = NULL_TREE;
6062 #define DEF_ATTR_INT(ENUM, VALUE) \
6063 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6064 #define DEF_ATTR_STRING(ENUM, VALUE) \
6065 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6066 #define DEF_ATTR_IDENT(ENUM, STRING) \
6067 built_in_attributes[(int) ENUM] = get_identifier (STRING);
6068 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
6069 built_in_attributes[(int) ENUM] \
6070 = tree_cons (built_in_attributes[(int) PURPOSE], \
6071 built_in_attributes[(int) VALUE], \
6072 built_in_attributes[(int) CHAIN]);
6073 #include "builtin-attrs.def"
6074 #undef DEF_ATTR_NULL_TREE
6075 #undef DEF_ATTR_INT
6076 #undef DEF_ATTR_STRING
6077 #undef DEF_ATTR_IDENT
6078 #undef DEF_ATTR_TREE_LIST
6079 }
6080
6081 /* Handle a "const" attribute; arguments as in
6082 struct attribute_spec.handler. */
6083
6084 static tree
6085 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6086 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6087 bool *no_add_attrs)
6088 {
6089 if (TREE_CODE (*node) == FUNCTION_DECL)
6090 TREE_READONLY (*node) = 1;
6091 else
6092 *no_add_attrs = true;
6093
6094 return NULL_TREE;
6095 }
6096
6097 /* Handle a "nothrow" attribute; arguments as in
6098 struct attribute_spec.handler. */
6099
6100 static tree
6101 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6102 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6103 bool *no_add_attrs)
6104 {
6105 if (TREE_CODE (*node) == FUNCTION_DECL)
6106 TREE_NOTHROW (*node) = 1;
6107 else
6108 *no_add_attrs = true;
6109
6110 return NULL_TREE;
6111 }
6112
6113 /* Handle a "pure" attribute; arguments as in
6114 struct attribute_spec.handler. */
6115
6116 static tree
6117 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6118 int ARG_UNUSED (flags), bool *no_add_attrs)
6119 {
6120 if (TREE_CODE (*node) == FUNCTION_DECL)
6121 DECL_PURE_P (*node) = 1;
6122 /* TODO: support types. */
6123 else
6124 {
6125 warning (OPT_Wattributes, "%qs attribute ignored",
6126 IDENTIFIER_POINTER (name));
6127 *no_add_attrs = true;
6128 }
6129
6130 return NULL_TREE;
6131 }
6132
6133 /* Handle a "no vops" attribute; arguments as in
6134 struct attribute_spec.handler. */
6135
6136 static tree
6137 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6138 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6139 bool *ARG_UNUSED (no_add_attrs))
6140 {
6141 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6142 DECL_IS_NOVOPS (*node) = 1;
6143 return NULL_TREE;
6144 }
6145
6146 /* Helper for nonnull attribute handling; fetch the operand number
6147 from the attribute argument list. */
6148
6149 static bool
6150 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6151 {
6152 /* Verify the arg number is a constant. */
6153 if (!tree_fits_uhwi_p (arg_num_expr))
6154 return false;
6155
6156 *valp = TREE_INT_CST_LOW (arg_num_expr);
6157 return true;
6158 }

/* Handle the "nonnull" attribute.  */

static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
                          tree args, int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.
     Avoid diagnosing type-generic built-ins since those have no
     prototype.  */
  if (!args)
    {
      if (!prototype_p (type)
          && (!TYPE_ATTRIBUTES (type)
              || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
        {
          error ("nonnull attribute without arguments on a non-prototype");
          *no_add_attrs = true;
        }
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args), attr_arg_num++)
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
        {
          error ("nonnull argument has invalid operand number (argument %lu)",
                 (unsigned long) attr_arg_num);
          *no_add_attrs = true;
          return NULL_TREE;
        }

      if (prototype_p (type))
        {
          function_args_iterator iter;
          tree argument;

          function_args_iter_init (&iter, type);
          for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
            {
              argument = function_args_iter_cond (&iter);
              if (!argument || ck_num == arg_num)
                break;
            }

          if (!argument
              || TREE_CODE (argument) == VOID_TYPE)
            {
              error ("nonnull argument with out-of-range operand number "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }

          if (TREE_CODE (argument) != POINTER_TYPE)
            {
              error ("nonnull argument references non-pointer operand "
                     "(argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }
        }
    }

  return NULL_TREE;
}
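
/* A sketch of the kind of declaration this validates (hypothetical
   C equivalent):

     extern void copy (void *dst, const void *src, int n)
       __attribute__ ((nonnull (1, 2)));

   Operands 1 and 2 must name pointer parameters of the prototype, which
   the loop above checks one attribute argument at a time.  */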

/* Handle a "sentinel" attribute.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (!prototype_p (*node))
    {
      warning (OPT_Wattributes,
               "%qs attribute requires prototypes with named arguments",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }
  else if (!stdarg_p (*node))
    {
      warning (OPT_Wattributes,
               "%qs attribute only applies to variadic functions",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
        {
          warning (0, "requested position is not an integer constant");
          *no_add_attrs = true;
        }
      else if (tree_int_cst_lt (position, integer_zero_node))
        {
          warning (0, "requested position is less than zero");
          *no_add_attrs = true;
        }
    }

  return NULL_TREE;
}
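
/* Usage sketch (assumed, not from this unit):

     extern void exec_list (const char *first, ...)
       __attribute__ ((sentinel));

   requires the last argument of every call to be NULL, while
   sentinel (N) expects the NULL N positions before the end.  */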

/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
           && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
        (change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
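
/* The POINTER_TYPE branch covers declarations along these lines
   (hypothetical example):

     void (*fatal_handler) (const char *) __attribute__ ((noreturn));

   where the volatile qualifier encoding "noreturn" must be pushed down
   onto the pointed-to FUNCTION_TYPE rather than set on the decl.  */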

/* Handle a "noinline" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noinline_attribute (tree *node, tree name,
                           tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    {
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
        {
          warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
                   "with attribute %qs", name, "always_inline");
          *no_add_attrs = true;
        }
      else
        DECL_UNINLINABLE (*node) = 1;
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
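
/* For example (assumed), a declaration carrying both attributes

     extern int f (int) __attribute__ ((always_inline, noinline));

   keeps "always_inline" and drops "noinline" with a warning, since
   "always_inline" is already attached by the time this handler runs.  */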

/* Handle a "noclone" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noclone_attribute (tree *node, tree name,
                          tree ARG_UNUSED (args),
                          int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
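
/* "leaf" only makes sense on external functions: it promises that the
   callee returns to the current unit only by returning or by exception
   handling, never through a registered callback.  Hypothetical use:

     extern double sqrt (double) __attribute__ ((leaf));
*/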

/* Handle an "always_inline" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                                int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    {
      /* Set the attribute and mark it for disregarding inline limits.  */
      DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
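
/* On the Ada side this is the effect of pragma Inline_Always, e.g.
   (assumed example):

     procedure Swap (X, Y : in out Integer);
     pragma Inline_Always (Swap);

   The attribute itself is added by the caller of this handler; here we
   only flag the decl so that inline limits are disregarded.  */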

/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                         int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
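
/* The attribute is only accepted on functions that return a pointer,
   e.g. (hypothetical):

     extern void *xmalloc (unsigned long) __attribute__ ((malloc));

   It tells the optimizers that the returned pointer never aliases any
   other object live at the point of the call.  */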

/* Fake handler for attributes we don't properly support.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
                        tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args),
                        int ARG_UNUSED (flags),
                        bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}

/* Handle a "type_generic" attribute.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
                               tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                               bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}

/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
                              int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  *no_add_attrs = true;

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */
  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
  if (!vector_type)
    return NULL_TREE;

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, vector_type);

  return NULL_TREE;
}
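
/* In the example above, the inner type short is widened to a 16-byte,
   8-element vector, and reconstruct_complex_type then rebuilds the
   pointer around it, so foo ends up typed as a pointer to that V8HI
   vector type.  */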

/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                              int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  *no_add_attrs = true;

  if (TREE_CODE (type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  vector_type = build_vector_type_for_array (type, name);
  if (!vector_type)
    return NULL_TREE;

  TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
  *node = vector_type;

  return NULL_TREE;
}
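
/* This is how GNAT turns a constrained array type into a true vector
   type, typically via pragma Machine_Attribute, e.g. (assumed sketch):

     type Int_Vec is array (1 .. 4) of Integer;
     pragma Machine_Attribute (Int_Vec, "vector_type");

   The original array is kept as TYPE_REPRESENTATIVE_ARRAY so that the
   array view can be recovered, e.g. for debug info.  */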

/* ----------------------------------------------------------------------- *
 *                           BUILTIN FUNCTIONS                             *
 * ----------------------------------------------------------------------- */

/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or
   two names.  The nonansi_p parameter is kept for interface compatibility
   with the C front-end but is unused here: Ada has no equivalent of
   -fno-builtin or -fno-nonansi-builtin (see the ??? note below).  */

static void
def_builtin_1 (enum built_in_function fncode,
               const char *name,
               enum built_in_class fnclass,
               tree fntype, tree libtype,
               bool both_p, bool fallback_p,
               bool nonansi_p ATTRIBUTE_UNUSED,
               tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was set up in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode))
    return;

  if (fntype == error_mark_node)
    return;

  gcc_assert ((!both_p && !fallback_p)
              || !strncmp (name, "__builtin_",
                           strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
                               (fallback_p ? libname : NULL),
                               fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
                          NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
}
6552
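/* C dialect flags tested by the conditions in builtins.def.  The Ada
   front-end has no notion of the C standard revisions, so they are all
   left off.  */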
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;

/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
                    NONANSI_P, ATTRS, IMPLICIT, COND)                     \
  if (NAME && COND)                                                       \
    def_builtin_1 (ENUM, NAME, CLASS,                                     \
                   builtin_types[(int) TYPE],                             \
                   builtin_types[(int) LIBTYPE],                          \
                   BOTH_P, FALLBACK_P, NONANSI_P,                         \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
}
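
/* As an illustration (abridged, assumed rendering), the builtins.def
   entry for memcpy expands to roughly:

     if ("__builtin_memcpy" && true)
       def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                      builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                      builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                      true, true, false,
                      built_in_attributes[(int) ATTR_NOTHROW_NONNULL_LEAF],
                      true);

   declaring both __builtin_memcpy and plain memcpy since BOTH_P is
   true.  */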
/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target-specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"