/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2019, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "diagnostic.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "toplev.h"
41 #include "output.h"
42 #include "debug.h"
43 #include "convert.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
48
49 #include "ada.h"
50 #include "types.h"
51 #include "atree.h"
52 #include "nlists.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetics may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_stack_protect_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
static tree handle_noicf_attribute (tree *, tree, tree, int, bool *);
static tree handle_noipa_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_flatten_attribute (tree *, tree, tree, int, bool *);
static tree handle_used_attribute (tree *, tree, tree, int, bool *);
static tree handle_cold_attribute (tree *, tree, tree, int, bool *);
static tree handle_hot_attribute (tree *, tree, tree, int, bool *);
static tree handle_target_attribute (tree *, tree, tree, int, bool *);
static tree handle_target_clones_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

static const struct attribute_spec::exclusions attr_cold_hot_exclusions[] =
{
  { "cold", true,  true,  true  },
  { "hot" , true,  true,  true  },
  { NULL  , false, false, false }
};

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging in a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "const",        0, 0,  true,  false, false, false,
    handle_const_attribute, NULL },
  { "nothrow",      0, 0,  true,  false, false, false,
    handle_nothrow_attribute, NULL },
  { "pure",         0, 0,  true,  false, false, false,
    handle_pure_attribute, NULL },
  { "no vops",      0, 0,  true,  false, false, false,
    handle_novops_attribute, NULL },
  { "nonnull",      0, -1, false, true,  true,  false,
    handle_nonnull_attribute, NULL },
  { "sentinel",     0, 1,  false, true,  true,  false,
    handle_sentinel_attribute, NULL },
  { "noreturn",     0, 0,  true,  false, false, false,
    handle_noreturn_attribute, NULL },
  { "stack_protect",0, 0,  true,  false, false, false,
    handle_stack_protect_attribute, NULL },
  { "noinline",     0, 0,  true,  false, false, false,
    handle_noinline_attribute, NULL },
  { "noclone",      0, 0,  true,  false, false, false,
    handle_noclone_attribute, NULL },
  { "no_icf",       0, 0,  true,  false, false, false,
    handle_noicf_attribute, NULL },
  { "noipa",        0, 0,  true,  false, false, false,
    handle_noipa_attribute, NULL },
  { "leaf",         0, 0,  true,  false, false, false,
    handle_leaf_attribute, NULL },
  { "always_inline",0, 0,  true,  false, false, false,
    handle_always_inline_attribute, NULL },
  { "malloc",       0, 0,  true,  false, false, false,
    handle_malloc_attribute, NULL },
  { "type generic", 0, 0,  false, true,  true,  false,
    handle_type_generic_attribute, NULL },

  { "flatten",      0, 0,  true,  false, false, false,
    handle_flatten_attribute, NULL },
  { "used",         0, 0,  true,  false, false, false,
    handle_used_attribute, NULL },
  { "cold",         0, 0,  true,  false, false, false,
    handle_cold_attribute, attr_cold_hot_exclusions },
  { "hot",          0, 0,  true,  false, false, false,
    handle_hot_attribute, attr_cold_hot_exclusions },
  { "target",       1, -1, true,  false, false, false,
    handle_target_attribute, NULL },
  { "target_clones",1, -1, true,  false, false, false,
    handle_target_clones_attribute, NULL },

  { "vector_size",  1, 1,  false, true,  false, false,
    handle_vector_size_attribute, NULL },
  { "vector_type",  0, 0,  false, true,  false, false,
    handle_vector_type_attribute, NULL },
  { "may_alias",    0, 0,  false, true,  false, false,
    NULL, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",       3, 3,  false, true,  true,  false,
    fake_attribute_handler, NULL },
  { "format_arg",   1, 1,  false, true,  true,  false,
    fake_attribute_handler, NULL },

  { NULL,           0, 0,  false, false, false, false,
    NULL, NULL }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
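
/* Note that both tables above are indexed by GNAT node id, offset by
   First_Node_Id; they are sized by max_gnat_nodes and allocated in
   init_gnat_utils below.  */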

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash
{
  hashval_t hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);

  static int
  keep_cache_entry (pad_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree fold_bit_position (const_tree);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on.  */
  tree decl;

  /* The corresponding entity's Scope.  */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed.  */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to.  */
  vec<tree> types;

  /* The next queue item.  */
  struct deferred_decl_context_node *next;
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
				       tree type);
\f
/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}
\f
/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
		&& (PRESENT_GNU_TREE (gnat_entity)
		    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
\f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type, debug_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
			? tree_code_for_record_type (gnat_equiv)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  /* Create a debug type so that debuggers only see an unspecified type.  */
  if (Needs_Debug_Info (gnat_type))
    {
      debug_type = make_node (LANG_TYPE);
      TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
      TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
      SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);
    }

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;
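
  /* The XUB, XUA, XUP and XUT suffixes used below are the GNAT encoding
     suffixes (see exp_dbug.ads) for, respectively, the bounds template,
     the array, the fat pointer and the object record of an unconstrained
     array type.  */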
  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
\f
/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || !current_function_decl;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block; otherwise, add it to the list of its
     parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
\f
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
	 Some parallel types seem to be present in multiple parallel type
	 chains, so don't mess with their context if they already have
	 one.  */
      if (!TYPE_CONTEXT (parallel_type))
	{
	  if (TYPE_STUB_DECL (parallel_type))
	    DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
	  TYPE_CONTEXT (parallel_type) = context;
	}

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for the debug info, or Empty if there is no such scope.  If not NULL,
   set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
	{
	case E_Function:
	case E_Procedure:
	  if (Present (Protected_Body_Subprogram (gnat_entity)))
	    gnat_entity = Protected_Body_Subprogram (gnat_entity);

	  /* If the scope is a subprogram, then just rely on
	     current_function_decl, so that we don't have to defer
	     anything.  This is needed because other places rely on the
	     validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
	  if (is_subprogram)
	    *is_subprogram = true;
	  return gnat_entity;

	case E_Record_Type:
	case E_Record_Subtype:
	  return gnat_entity;

	default:
	  /* By default, we are not interested in this particular scope: go to
	     the outer one.  */
	  break;
	}

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type, tree context,
			   struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, but create it first if need be.  */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context
	= build_translation_unit_decl (get_identifier (main_input_filename));
      debug_hooks->register_main_translation_unit (global_context);
    }

  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to the current_function_decl
	 if the GNAT scope reached the global scope, if it reached a
	 subprogram or if the declaration is a subprogram or a variable (for
	 them we skip intermediate context types because the subprogram body
	 elaboration machinery and the inliner both expect a subprogram
	 context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
	= get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl && force_global == 0)
	context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope
     and the current_function_decl did not help in determining the context,
     use the global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      /* External declarations must go to the binding level they belong to.
	 This ensures that the corresponding imported entities are available
	 in the debugger at the proper time.  */
      if (DECL_EXTERNAL (decl)
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && fndecl_built_in_p (decl))
	vec_safe_push (builtin_decls, decl);
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
	 to generate a typedef in DWARF for them and make sure it is preserved,
	 unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
	  && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
	      || DECL_ARTIFICIAL (decl)))
	;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
	 generate the typedef in DWARF.  Also do that for fat pointer types
	 because, even though they are tagged types in the C sense, they are
	 still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
	       && (TREE_CODE (t) == ARRAY_TYPE
		   || TREE_CODE (t) == POINTER_TYPE
		   || TYPE_IS_FAT_POINTER_P (t)))
	{
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt,
				     DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_TYPE (decl) = tt;
	  if (TYPE_NAME (t)
	      && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	      && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  /* Array types need to have a name so that they can be related to
	     their GNAT encodings.  */
	  if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
	    TYPE_NAME (t) = DECL_NAME (decl);
	  t = NULL_TREE;
	}
      else if (TYPE_NAME (t)
	       && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	       && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
	 qualifiers machinery to work properly (see check_qualified_type).
	 Also propagate the context to them.  Note that it will be propagated
	 to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  /* ??? Because of the previous kludge, we can have variants of fat
	     pointer types with different names.  */
	  if (!(TYPE_IS_FAT_POINTER_P (t)
		&& TYPE_NAME (t)
		&& TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}
\f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

		     <--------- pos ---------->
		 { +------------+-------------+-----------------+
       record =>{ |############|     ...     | field (type)    |
		 { +------------+-------------+-----------------+
		   |<-- room -->|<- voffset ->|<---- size ----->|
		   o            o
		   |            |
		   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));
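
  /* For instance, with ALIGN = 64 bits, i.e. an 8-byte boundary, and an
     address ending in ...101 (5 mod 8), the two's complement of the address
     ends in ...011, so "and"ing it with 7 yields VOFFSET = 3, and 5 + 3 = 8
     is indeed the next 8-byte boundary.  */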

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int align = TYPE_ALIGN (type);
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_PACKED (new_type) = 1;
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
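      /* E.g. a 9-bit size is rounded up to 16 bits here, which makes a
	 16-bit integral mode possible for the type.  */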
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      tree type_size = TYPE_ADA_SIZE (type);
      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (type_size))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size and we
	 don't need to lower the alignment.  */
      new_size = tree_to_uhwi (type_size);
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
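      /* E.g. with BITS_PER_UNIT == 8, a 13-bit RM size is rounded up to
	 16 bits, i.e. 2 storage units.  */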
      if (new_size == size && (max_align == 0 || align <= max_align))
	return type;

      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
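      /* NEW_SIZE & -NEW_SIZE isolates the lowest set bit of NEW_SIZE, so
	 e.g. a 48-bit size yields a 16-bit alignment.  */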
      if (max_align > 0 && new_align > max_align)
	new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      tree new_field, new_field_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	new_field_type = make_packable_type (new_field_type, true, max_align);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_field_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_field_size = DECL_SIZE (field);

      /* This is a layout with full representation, alignment and size clauses
	 so we simply pass 0 as PACKED like gnat_to_gnu_field in this case.  */
      new_field
	= create_field_decl (DECL_NAME (field), new_field_type, new_type,
			     new_field_size, bit_position (field), 0,
			     !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (new_type, TYPE_DEBUG_TYPE (type));
  else if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type),
				       MODE_INT, 1).else_blk ());

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  return new_type;
}

/* Return true if TYPE has an unsigned representation.  This needs to be used
   when the representation of types whose precision is not equal to their
   size is manipulated based on the RM size.  */

static inline bool
type_unsigned_for_rm (tree type)
{
  /* This is the common case.  */
  if (TYPE_UNSIGNED (type))
    return true;

  /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
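  /* For instance, an integer subtype with range 1 .. 10 has a signed base
     type, but its constant non-negative lower bound means that the RM view
     of it can be treated as unsigned.  */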
  if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
      && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
    return true;

  return false;
}

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case BOOLEAN_TYPE:
      /* Do not mess with boolean types that have foreign convention.  */
      if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
	break;

      /* ... fall through ... */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));
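
      /* A biased type stores values as offsets from its lower bound, so
	 e.g. a range of 7 .. 10 needs only 2 bits in biased form.  */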

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
	 or if the lower bound is constant and non-negative or if the type is
	 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
      if (type_unsigned_for_rm (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
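      /* A fat pointer carries both the P_ARRAY and P_BOUNDS pointers, so
	 anything smaller than two pointers can only be a thin pointer.  */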
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  scalar_int_mode p_mode;
	  if (!int_mode_for_size (size, 0).exists (&p_mode)
	      || !targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}

/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return false;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment, RM size and storage order.
     Taking the mode into account is redundant since it is determined by
     the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Compute the hash value for the padded TYPE.  */

static hashval_t
hash_pad_type (tree type)
{
  hashval_t hashcode;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  return hashcode;
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
canonicalize_pad_type (tree type)
{
  const hashval_t hashcode = hash_pad_type (type);
  struct pad_type_hash in, *h, **slot;

  in.hash = hashcode;
  in.type = type;
  slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  h = *slot;
  if (!h)
    {
      h = ggc_alloc<pad_type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *slot = h;
    }

  return h->type;
}

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too; in this case,
   the padded type is canonicalized before being returned.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info).  Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information.  */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));
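  /* E.g. a 12-bit TYPE_SIZE yields a 2-byte TYPE_SIZE_UNIT here.  */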

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
	{
	  tree canonical = canonicalize_pad_type (record);
	  if (canonical != record)
	    {
	      record = canonical;
	      goto built;
	    }
	}
    }

  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, maybe_debug_type (type));

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding
	 since it will be referenced by debugging information attributes.
	 At global level, we need a single variable across all translation
	 units.  */
      if (size
	  && TREE_CODE (size) != INTEGER_CST
	  && (definition || global_bindings_p ()))
	{
	  /* Whether or not gnat_entity comes from source, this XVZ variable
	     is a compilation artifact.  */
	  size_unit
	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			       size_unit, true, global_bindings_p (),
			       !definition && global_bindings_p (), false,
			       false, true, true, NULL, gnat_entity);
	  TYPE_SIZE_UNIT (record) = size_unit;
	}

      /* There is no need to show what we are a subtype of when outputting as
	 few encodings as possible: regular debugging information makes this
	 redundant.  */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
	{
	  tree marker = make_node (RECORD_TYPE);
	  tree orig_name = TYPE_IDENTIFIER (type);

	  TYPE_NAME (marker) = concat_name (name, "XVS");
	  finish_record_type (marker,
			      create_field_decl (orig_name,
						 build_reference_type (type),
						 marker, NULL_TREE, NULL_TREE,
						 0, 0),
			      0, true);
	  TYPE_SIZE_UNIT (marker) = size_unit;

	  add_parallel_type (record, marker);
	}
    }
1511
1512 built:
1513 /* If a simple size was explicitly given, maybe issue a warning. */
1514 if (!size
1515 || TREE_CODE (size) == COND_EXPR
1516 || TREE_CODE (size) == MAX_EXPR
1517 || No (gnat_entity))
1518 return record;
1519
1520 /* But don't do it if we are just annotating types and the type is tagged or
1521 concurrent, since these types aren't fully laid out in this mode. */
1522 if (type_annotate_only)
1523 {
1524 Entity_Id gnat_type
1525 = is_component_type
1526 ? Component_Type (gnat_entity) : Etype (gnat_entity);
1527
1528 if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
1529 return record;
1530 }
1531
1532 /* Take the original size as the maximum size of the input if there was an
1533 unconstrained record involved, and round it up to the alignment, if one
1534 was specified, but only for aggregate types. */
1535 if (CONTAINS_PLACEHOLDER_P (orig_size))
1536 orig_size = max_size (orig_size, true);
1537
1538 if (align && AGGREGATE_TYPE_P (type))
1539 orig_size = round_up (orig_size, align);
1540
1541 if (!operand_equal_p (size, orig_size, 0)
1542 && !(TREE_CODE (size) == INTEGER_CST
1543 && TREE_CODE (orig_size) == INTEGER_CST
1544 && (TREE_OVERFLOW (size)
1545 || TREE_OVERFLOW (orig_size)
1546 || tree_int_cst_lt (size, orig_size))))
1547 {
1548 Node_Id gnat_error_node;
1549
1550 /* For a packed array, post the message on the original array type. */
1551 if (Is_Packed_Array_Impl_Type (gnat_entity))
1552 gnat_entity = Original_Array_Type (gnat_entity);
1553
1554 if ((Ekind (gnat_entity) == E_Component
1555 || Ekind (gnat_entity) == E_Discriminant)
1556 && Present (Component_Clause (gnat_entity)))
1557 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1558 else if (Has_Size_Clause (gnat_entity))
1559 gnat_error_node = Expression (Size_Clause (gnat_entity));
1560 else if (Has_Object_Size_Clause (gnat_entity))
1561 gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
1562 else
1563 gnat_error_node = Empty;
1564
1565 /* Generate message only for entities that come from source, since
1566 if we have an entity created by expansion, the message will be
1567 generated for some other corresponding source entity. */
1568 if (Comes_From_Source (gnat_entity))
1569 {
1570 if (is_component_type)
1571 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1572 gnat_entity, gnat_entity,
1573 size_diffop (size, orig_size));
1574 else if (Present (gnat_error_node))
1575 post_error_ne_tree ("{^ }bits of & unused?",
1576 gnat_error_node, gnat_entity,
1577 size_diffop (size, orig_size));
1578 }
1579 }
1580
1581 return record;
1582 }
1583
1584 /* Return true if padded TYPE was built with an RM size. */
1585
1586 bool
1587 pad_type_has_rm_size (tree type)
1588 {
1589 /* This is required for the lookup. */
1590 if (!TREE_CONSTANT (TYPE_SIZE (type)))
1591 return false;
1592
1593 const hashval_t hashcode = hash_pad_type (type);
1594 struct pad_type_hash in, *h;
1595
1596 in.hash = hashcode;
1597 in.type = type;
1598 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1599
1600 /* The types built with an RM size are the canonicalized ones. */
1601 return h && h->type == type;
1602 }
1603
1604 /* Return a copy of the padded TYPE but with reverse storage order. */
1605
1606 tree
1607 set_reverse_storage_order_on_pad_type (tree type)
1608 {
1609 if (flag_checking)
1610 {
1611 /* The function must not be used if the inner type is not scalar. */
1612 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1613 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1614 && !VECTOR_TYPE_P (inner_type));
1615 }
1616
1617 /* This is required for the canonicalization. */
1618 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1619
1620 tree field = copy_node (TYPE_FIELDS (type));
1621 type = copy_type (type);
1622 DECL_CONTEXT (field) = type;
1623 TYPE_FIELDS (type) = field;
1624 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1625 return canonicalize_pad_type (type);
1626 }
1627 \f
1628 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1629 If this is a multi-dimensional array type, do this recursively.
1630
1631 OP may be
1632 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1633 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1634 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1635
1636 void
1637 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1638 {
1639 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1640 of a one-dimensional array, since the padding has the same alias set
1641 as the field type, but if it's a multi-dimensional array, we need to
1642 see the inner types. */
1643 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1644 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1645 || TYPE_PADDING_P (gnu_old_type)))
1646 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1647
1648 /* Unconstrained array types are deemed incomplete and would thus be given
1649 alias set 0. Retrieve the underlying array type. */
1650 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1651 gnu_old_type
1652 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1653 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1654 gnu_new_type
1655 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1656
1657 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1658 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1659 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1660 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1661
1662 switch (op)
1663 {
1664 case ALIAS_SET_COPY:
1665 /* The alias set shouldn't be copied between array types with different
1666 aliasing settings because this can break the aliasing relationship
1667 between the array type and its element type. */
1668 if (flag_checking || flag_strict_aliasing)
1669 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1670 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1671 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1672 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1673
1674 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1675 break;
1676
1677 case ALIAS_SET_SUBSET:
1678 case ALIAS_SET_SUPERSET:
1679 {
1680 alias_set_type old_set = get_alias_set (gnu_old_type);
1681 alias_set_type new_set = get_alias_set (gnu_new_type);
1682
1683 /* Do nothing if the alias sets conflict. This ensures that we
1684 never call record_alias_subset several times for the same pair
1685 or at all for alias set 0. */
1686 if (!alias_sets_conflict_p (old_set, new_set))
1687 {
1688 if (op == ALIAS_SET_SUBSET)
1689 record_alias_subset (old_set, new_set);
1690 else
1691 record_alias_subset (new_set, old_set);
1692 }
1693 }
1694 break;
1695
1696 default:
1697 gcc_unreachable ();
1698 }
1699
1700 record_component_aliases (gnu_new_type);
1701 }
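
/* Editorial sketch of a typical call, with illustrative variable names:
   after building GNU_NEW_TYPE as another view of GNU_OLD_TYPE, one
   would copy the alias set so that accesses to both views conflict:

     relate_alias_sets (gnu_new_type, gnu_old_type, ALIAS_SET_COPY);

   A type that must conflict with the old type while keeping its own
   alias set would pass ALIAS_SET_SUPERSET or ALIAS_SET_SUBSET instead,
   depending on the direction of the containment.  */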
1702 \f
1703 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1704 ARTIFICIAL_P is true if the type was generated by the compiler. */
1705
1706 void
1707 record_builtin_type (const char *name, tree type, bool artificial_p)
1708 {
1709 tree type_decl = build_decl (input_location,
1710 TYPE_DECL, get_identifier (name), type);
1711 DECL_ARTIFICIAL (type_decl) = artificial_p;
1712 TYPE_ARTIFICIAL (type) = artificial_p;
1713 gnat_pushdecl (type_decl, Empty);
1714
1715 if (debug_hooks->type_decl)
1716 debug_hooks->type_decl (type_decl, false);
1717 }
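
/* Editorial sketch of a typical call:

     record_builtin_type ("boolean", boolean_type_node, true);

   pushes a TYPE_DECL for the type at the current binding level and
   notifies the debug back-end, so the name is usable by the debugger.  */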
1718 \f
1719 /* Finish constructing the character type CHAR_TYPE.
1720
1721 In Ada character types are enumeration types and, as a consequence, are
1722 represented in the front-end by integral types holding the positions of
1723 the enumeration values as defined by the language, which means that the
1724 integral types are unsigned.
1725
1726 Unfortunately the signedness of 'char' in C is implementation-defined
1727 and GCC even has the option -f[un]signed-char to toggle it at run time.
1728 Since GNAT's philosophy is to be compatible with C by default, to wit
1729 Interfaces.C.char is defined as a mere copy of Character, we may need
1730 to declare character types as signed types in GENERIC and generate the
1731 necessary adjustments to make them behave as unsigned types.
1732
1733 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1734 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1735 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1736 types. The idea is to ensure that the bit pattern contained in the
1737 Esize'd objects is not changed, even though the numerical value will
1738 be interpreted differently depending on the signedness. */
1739
1740 void
1741 finish_character_type (tree char_type)
1742 {
1743 if (TYPE_UNSIGNED (char_type))
1744 return;
1745
1746 /* Make a copy of a generic unsigned version since we'll modify it. */
1747 tree unsigned_char_type
1748 = (char_type == char_type_node
1749 ? unsigned_char_type_node
1750 : copy_type (gnat_unsigned_type_for (char_type)));
1751
1752 /* Create an unsigned version of the type and set it as debug type. */
1753 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1754 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1755 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1756 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1757
1758 /* If this is a subtype, make the debug type a subtype of the debug type
1759 of the base type and convert literal RM bounds to unsigned. */
1760 if (TREE_TYPE (char_type))
1761 {
1762 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1763 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1764 tree max_value = TYPE_RM_MAX_VALUE (char_type);
1765
1766 if (TREE_CODE (min_value) == INTEGER_CST)
1767 min_value = fold_convert (base_unsigned_char_type, min_value);
1768 if (TREE_CODE (max_value) == INTEGER_CST)
1769 max_value = fold_convert (base_unsigned_char_type, max_value);
1770
1771 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1772 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1773 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1774 }
1775
1776 /* Adjust the RM bounds of the original type to unsigned; that's especially
1777 important for base types since their bounds are implicit in this case. */
1778 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1779 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1780 }
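
/* Editorial example with illustrative values: on a target where 'char'
   is signed and CHAR_TYPE_SIZE is 8, an object of a character type
   containing the bit pattern 16#FF# reads back as -1 in GENERIC, but
   stands for position 255 in Ada; the unsigned debug type and unsigned
   RM bounds set above keep that interpretation consistent.  */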
1781
1782 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1783 finish constructing the record type as a fat pointer type. */
1784
1785 void
1786 finish_fat_pointer_type (tree record_type, tree field_list)
1787 {
1788 /* Make sure we can put it into a register. */
1789 if (STRICT_ALIGNMENT)
1790 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1791
1792 /* Show what it really is. */
1793 TYPE_FAT_POINTER_P (record_type) = 1;
1794
1795 /* Do not emit debug info for it since the types of its fields may still be
1796 incomplete at this point. */
1797 finish_record_type (record_type, field_list, 0, false);
1798
1799 /* Force type_contains_placeholder_p to return true on it. Although the
1800 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1801 type but the representation of the unconstrained array. */
1802 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1803 }
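
/* Editorial sketch of a typical call, assuming the usual two-field
   layout of fat pointers in gigi (an array pointer and a bounds
   pointer; the pointer type names below are illustrative):

     tree record = make_node (RECORD_TYPE);
     tree fields
       = create_field_decl (get_identifier ("P_ARRAY"), ptr_array_type,
                            record, NULL_TREE, NULL_TREE, 0, 0);
     DECL_CHAIN (fields)
       = create_field_decl (get_identifier ("P_BOUNDS"), ptr_bounds_type,
                            record, NULL_TREE, NULL_TREE, 0, 0);
     finish_fat_pointer_type (record, fields);  */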
1804
1805 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1806 finish constructing the record or union type. If REP_LEVEL is zero, this
1807 record has no representation clause and so will be entirely laid out here.
1808 If REP_LEVEL is one, this record has a representation clause and has been
1809 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1810 this record is derived from a parent record and thus inherits its layout;
1811 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1812 additional debug info needs to be output for this type. */
1813
1814 void
1815 finish_record_type (tree record_type, tree field_list, int rep_level,
1816 bool debug_info_p)
1817 {
1818 enum tree_code code = TREE_CODE (record_type);
1819 tree name = TYPE_IDENTIFIER (record_type);
1820 tree ada_size = bitsize_zero_node;
1821 tree size = bitsize_zero_node;
1822 bool had_size = TYPE_SIZE (record_type) != 0;
1823 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1824 bool had_align = TYPE_ALIGN (record_type) != 0;
1825 tree field;
1826
1827 TYPE_FIELDS (record_type) = field_list;
1828
1829 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1830 generate debug info and have a parallel type. */
1831 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1832
1833 /* Globally initialize the record first. If this is a rep'ed record,
1834 that just means some initializations; otherwise, layout the record. */
1835 if (rep_level > 0)
1836 {
1837 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1838 TYPE_ALIGN (record_type)));
1839
1840 if (!had_size_unit)
1841 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1842
1843 if (!had_size)
1844 TYPE_SIZE (record_type) = bitsize_zero_node;
1845
1846 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1847 out just like a UNION_TYPE, since the size will be fixed. */
1848 else if (code == QUAL_UNION_TYPE)
1849 code = UNION_TYPE;
1850 }
1851 else
1852 {
1853 /* Ensure there isn't a size already set. There can be one in an error
1854 case where there is a rep clause but all fields have errors and
1855 no longer have a position. */
1856 TYPE_SIZE (record_type) = 0;
1857
1858 /* Ensure we use the traditional GCC layout for bitfields when we need
1859 to pack the record type or have a representation clause. The other
1860 possible layout (Microsoft C compiler), if available, would prevent
1861 efficient packing in almost all cases. */
1862 #ifdef TARGET_MS_BITFIELD_LAYOUT
1863 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1864 decl_attributes (&record_type,
1865 tree_cons (get_identifier ("gcc_struct"),
1866 NULL_TREE, NULL_TREE),
1867 ATTR_FLAG_TYPE_IN_PLACE);
1868 #endif
1869
1870 layout_type (record_type);
1871 }
1872
1873 /* At this point, the position and size of each field is known. It was
1874 either set before entry by a rep clause, or by laying out the type above.
1875
1876 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1877 to compute the Ada size; the GCC size and alignment (for rep'ed records
1878 that are not padding types); and the mode (for rep'ed records). We also
1879 clear the DECL_BIT_FIELD indication for the cases we know have not been
1880 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1881
1882 if (code == QUAL_UNION_TYPE)
1883 field_list = nreverse (field_list);
1884
1885 for (field = field_list; field; field = DECL_CHAIN (field))
1886 {
1887 tree type = TREE_TYPE (field);
1888 tree pos = bit_position (field);
1889 tree this_size = DECL_SIZE (field);
1890 tree this_ada_size;
1891
1892 if (RECORD_OR_UNION_TYPE_P (type)
1893 && !TYPE_FAT_POINTER_P (type)
1894 && !TYPE_CONTAINS_TEMPLATE_P (type)
1895 && TYPE_ADA_SIZE (type))
1896 this_ada_size = TYPE_ADA_SIZE (type);
1897 else
1898 this_ada_size = this_size;
1899
1900 const bool variant_part = (TREE_CODE (type) == QUAL_UNION_TYPE);
1901 const bool variant_part_at_zero = variant_part && integer_zerop (pos);
1902
1903 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1904 if (DECL_BIT_FIELD (field)
1905 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1906 {
1907 unsigned int align = TYPE_ALIGN (type);
1908
1909 /* In the general case, type alignment is required. */
1910 if (value_factor_p (pos, align))
1911 {
1912 /* The enclosing record type must be sufficiently aligned.
1913 Otherwise, if no alignment was specified for it and it
1914 has been laid out already, bump its alignment to the
1915 desired one if this is compatible with its size and
1916 maximum alignment, if any. */
1917 if (TYPE_ALIGN (record_type) >= align)
1918 {
1919 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1920 DECL_BIT_FIELD (field) = 0;
1921 }
1922 else if (!had_align
1923 && rep_level == 0
1924 && value_factor_p (TYPE_SIZE (record_type), align)
1925 && (!TYPE_MAX_ALIGN (record_type)
1926 || TYPE_MAX_ALIGN (record_type) >= align))
1927 {
1928 SET_TYPE_ALIGN (record_type, align);
1929 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1930 DECL_BIT_FIELD (field) = 0;
1931 }
1932 }
1933
1934 /* In the non-strict alignment case, only byte alignment is required. */
1935 if (!STRICT_ALIGNMENT
1936 && DECL_BIT_FIELD (field)
1937 && value_factor_p (pos, BITS_PER_UNIT))
1938 DECL_BIT_FIELD (field) = 0;
1939 }
1940
1941 /* Clear DECL_BIT_FIELD_TYPE for a variant part at offset 0; it's simply
1942 not supported by the DECL_BIT_FIELD_REPRESENTATIVE machinery because
1943 the variant part is always the last field in the list. */
1944 if (variant_part_at_zero)
1945 DECL_BIT_FIELD_TYPE (field) = NULL_TREE;
1946
1947 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1948 field is technically not addressable. Except that it can actually
1949 be addressed if it is BLKmode and happens to be properly aligned. */
1950 if (DECL_BIT_FIELD (field)
1951 && !(DECL_MODE (field) == BLKmode
1952 && value_factor_p (pos, BITS_PER_UNIT)))
1953 DECL_NONADDRESSABLE_P (field) = 1;
1954
1955 /* A type must be as aligned as its most aligned field that is not
1956 a bit-field. But this is already enforced by layout_type. */
1957 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1958 SET_TYPE_ALIGN (record_type,
1959 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1960
1961 switch (code)
1962 {
1963 case UNION_TYPE:
1964 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1965 size = size_binop (MAX_EXPR, size, this_size);
1966 break;
1967
1968 case QUAL_UNION_TYPE:
1969 ada_size
1970 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1971 this_ada_size, ada_size);
1972 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1973 this_size, size);
1974 break;
1975
1976 case RECORD_TYPE:
1977 /* Since we know here that all fields are sorted in order of
1978 increasing bit position, the size of the record is one
1979 higher than the ending bit of the last field processed,
1980 unless we have a variant part at offset 0, since in this
1981 case we might have a field outside the variant part that
1982 has a higher ending position; so use a MAX in this case.
1983 Also, if this field is a QUAL_UNION_TYPE, we need to take
1984 into account the previous size in the case of empty variants. */
1985 ada_size
1986 = merge_sizes (ada_size, pos, this_ada_size, variant_part,
1987 variant_part_at_zero);
1988 size
1989 = merge_sizes (size, pos, this_size, variant_part,
1990 variant_part_at_zero);
1991 break;
1992
1993 default:
1994 gcc_unreachable ();
1995 }
1996 }
1997
1998 if (code == QUAL_UNION_TYPE)
1999 nreverse (field_list);
2000
2001 /* We need to set the regular sizes if REP_LEVEL is one. */
2002 if (rep_level == 1)
2003 {
2004 /* If this is a padding record, we never want to make the size smaller
2005 than what was specified in it, if any. */
2006 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
2007 size = TYPE_SIZE (record_type);
2008
2009 tree size_unit = had_size_unit
2010 ? TYPE_SIZE_UNIT (record_type)
2011 : convert (sizetype,
2012 size_binop (CEIL_DIV_EXPR, size,
2013 bitsize_unit_node));
2014 const unsigned int align = TYPE_ALIGN (record_type);
2015
2016 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
2017 TYPE_SIZE_UNIT (record_type)
2018 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
2019 }
2020
2021 /* We need to set the Ada size if REP_LEVEL is zero or one. */
2022 if (rep_level < 2)
2023 {
2024 /* Now set any of the values we've just computed that apply. */
2025 if (!TYPE_FAT_POINTER_P (record_type)
2026 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
2027 SET_TYPE_ADA_SIZE (record_type, ada_size);
2028 }
2029
2030 /* We need to set the mode if REP_LEVEL is one or two. */
2031 if (rep_level > 0)
2032 {
2033 compute_record_mode (record_type);
2034 finish_bitfield_layout (record_type);
2035 }
2036
2037 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
2038 TYPE_MAX_ALIGN (record_type) = 0;
2039
2040 if (debug_info_p)
2041 rest_of_record_type_compilation (record_type);
2042 }
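
/* Editorial sketch, mirroring the padding-type code earlier in this
   file: wrap an existing TYPE in a single-field record that is laid
   out here (REP_LEVEL 0), without emitting debug info for it:

     tree record = make_node (RECORD_TYPE);
     tree field = create_field_decl (get_identifier ("F"), type, record,
                                     NULL_TREE, NULL_TREE, 0, 1);
     finish_record_type (record, field, 0, false);  */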
2043
2044 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
2045 PARALLEL_TYPE has no context and its computation is not deferred yet, also
2046 propagate TYPE's context to PARALLEL_TYPE or defer its propagation to the
2047 moment TYPE will get a context. */
2048
2049 void
2050 add_parallel_type (tree type, tree parallel_type)
2051 {
2052 tree decl = TYPE_STUB_DECL (type);
2053
2054 while (DECL_PARALLEL_TYPE (decl))
2055 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
2056
2057 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
2058
2059 /* If PARALLEL_TYPE already has a context, we are done. */
2060 if (TYPE_CONTEXT (parallel_type))
2061 return;
2062
2063 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
2064 it to PARALLEL_TYPE. */
2065 if (TYPE_CONTEXT (type))
2066 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2067
2068 /* Otherwise TYPE has no context yet. We know it will have one thanks to
2069 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2070 so we have nothing to do in this case. */
2071 }
2072
2073 /* Return true if TYPE has a parallel type. */
2074
2075 static bool
2076 has_parallel_type (tree type)
2077 {
2078 tree decl = TYPE_STUB_DECL (type);
2079
2080 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
2081 }
2082
2083 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
2084 associated with it. It need not be invoked directly in most cases as
2085 finish_record_type takes care of doing so. */
2086
2087 void
2088 rest_of_record_type_compilation (tree record_type)
2089 {
2090 bool var_size = false;
2091 tree field;
2092
2093 /* If this is a padded type, the bulk of the debug info has already been
2094 generated for the field's type. */
2095 if (TYPE_IS_PADDING_P (record_type))
2096 return;
2097
2098 /* If the type already has a parallel type (XVS type), then we're done. */
2099 if (has_parallel_type (record_type))
2100 return;
2101
2102 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2103 {
2104 /* We need to make an XVE/XVU record if any field has variable size,
2105 whether or not the record does. For example, if we have a union,
2106 it may be that all fields, rounded up to the alignment, have the
2107 same size, in which case we'll use that size. But the debug
2108 output routines (except Dwarf2) won't be able to output the fields,
2109 so we need to make the special record. */
2110 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2111 /* If a field has a non-constant qualifier, the record will have
2112 variable size too. */
2113 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2114 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2115 {
2116 var_size = true;
2117 break;
2118 }
2119 }
2120
2121 /* If this record type is of variable size, make a parallel record type that
2122 will tell the debugger how the former is laid out (see exp_dbug.ads). */
2123 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2124 {
2125 tree new_record_type
2126 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2127 ? UNION_TYPE : TREE_CODE (record_type));
2128 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2129 tree last_pos = bitsize_zero_node;
2130 tree old_field, prev_old_field = NULL_TREE;
2131
2132 new_name
2133 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2134 ? "XVU" : "XVE");
2135 TYPE_NAME (new_record_type) = new_name;
2136 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2137 TYPE_STUB_DECL (new_record_type)
2138 = create_type_stub_decl (new_name, new_record_type);
2139 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2140 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2141 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2142 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2143 TYPE_SIZE_UNIT (new_record_type)
2144 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2145
2146 /* Now scan all the fields, replacing each field with a new field
2147 corresponding to the new encoding. */
2148 for (old_field = TYPE_FIELDS (record_type); old_field;
2149 old_field = DECL_CHAIN (old_field))
2150 {
2151 tree field_type = TREE_TYPE (old_field);
2152 tree field_name = DECL_NAME (old_field);
2153 tree curpos = fold_bit_position (old_field);
2154 tree pos, new_field;
2155 bool var = false;
2156 unsigned int align = 0;
2157
2158 /* See how the position was modified from the last position.
2159
2160 There are two basic cases we support: a value was added
2161 to the last position or the last position was rounded to
2162 a boundary and then something was added. Check for the
2163 first case first. If not, see if there is any evidence
2164 of rounding. If so, round the last position and retry.
2165
2166 If this is a union, the position can be taken as zero. */
2167 if (TREE_CODE (new_record_type) == UNION_TYPE)
2168 pos = bitsize_zero_node;
2169 else
2170 pos = compute_related_constant (curpos, last_pos);
2171
2172 if (!pos
2173 && TREE_CODE (curpos) == MULT_EXPR
2174 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2175 {
2176 tree offset = TREE_OPERAND (curpos, 0);
2177 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2178 align = scale_by_factor_of (offset, align);
2179 last_pos = round_up (last_pos, align);
2180 pos = compute_related_constant (curpos, last_pos);
2181 }
2182 else if (!pos
2183 && TREE_CODE (curpos) == PLUS_EXPR
2184 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2185 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2186 && tree_fits_uhwi_p
2187 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2188 {
2189 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2190 unsigned HOST_WIDE_INT addend
2191 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2192 align
2193 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2194 align = scale_by_factor_of (offset, align);
2195 align = MIN (align, addend & -addend);
2196 last_pos = round_up (last_pos, align);
2197 pos = compute_related_constant (curpos, last_pos);
2198 }
2199 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2200 {
2201 align = TYPE_ALIGN (field_type);
2202 last_pos = round_up (last_pos, align);
2203 pos = compute_related_constant (curpos, last_pos);
2204 }
2205
2206 /* If we can't compute a position, set it to zero.
2207
2208 ??? We really should abort here, but it's too much work
2209 to get this correct for all cases. */
2210 if (!pos)
2211 pos = bitsize_zero_node;
2212
2213 /* See if this type is variable-sized and make a pointer type
2214 and indicate the indirection if so. Beware that the debug
2215 back-end may adjust the position computed above according
2216 to the alignment of the field type, i.e. the pointer type
2217 in this case, if we don't preventively counter that. */
2218 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2219 {
2220 field_type = build_pointer_type (field_type);
2221 if (align != 0 && TYPE_ALIGN (field_type) > align)
2222 {
2223 field_type = copy_type (field_type);
2224 SET_TYPE_ALIGN (field_type, align);
2225 }
2226 var = true;
2227 }
2228
2229 /* Make a new field name, if necessary. */
2230 if (var || align != 0)
2231 {
2232 char suffix[16];
2233
2234 if (align != 0)
2235 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2236 align / BITS_PER_UNIT);
2237 else
2238 strcpy (suffix, "XVL");
2239
2240 field_name = concat_name (field_name, suffix);
2241 }
2242
2243 new_field
2244 = create_field_decl (field_name, field_type, new_record_type,
2245 DECL_SIZE (old_field), pos, 0, 0);
2246 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2247 TYPE_FIELDS (new_record_type) = new_field;
2248
2249 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2250 zero. The only time it's not the last field of the record
2251 is when there are other components at fixed positions after
2252 it (meaning there was a rep clause for every field) and we
2253 want to be able to encode them. */
2254 last_pos = size_binop (PLUS_EXPR, curpos,
2255 (TREE_CODE (TREE_TYPE (old_field))
2256 == QUAL_UNION_TYPE)
2257 ? bitsize_zero_node
2258 : DECL_SIZE (old_field));
2259 prev_old_field = old_field;
2260 }
2261
2262 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2263
2264 add_parallel_type (record_type, new_record_type);
2265 }
2266 }
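
/* Editorial example of the position encoding above, with illustrative
   values: if a field sits at curpos = N * 64 for a variable N, no
   constant offset from last_pos exists a priori, so last_pos is first
   rounded up to the alignment deduced from the MULT_EXPR (at least 64
   bits here); the relative position then becomes computable and, the
   field size being constant, the field is renamed with an "XVA8"
   suffix (64 bits = 8 bytes) in the parallel type.  */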
2267
2268 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2269 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2270 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2271 replace a value of zero with the old size. If MAX is true, we take the
2272 MAX of the end position of this field with LAST_SIZE. In all other cases,
2273 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2274
2275 static tree
2276 merge_sizes (tree last_size, tree first_bit, tree size, bool special, bool max)
2277 {
2278 tree type = TREE_TYPE (last_size);
2279 tree new_size;
2280
2281 if (!special || TREE_CODE (size) != COND_EXPR)
2282 {
2283 new_size = size_binop (PLUS_EXPR, first_bit, size);
2284 if (max)
2285 new_size = size_binop (MAX_EXPR, last_size, new_size);
2286 }
2287
2288 else
2289 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2290 integer_zerop (TREE_OPERAND (size, 1))
2291 ? last_size : merge_sizes (last_size, first_bit,
2292 TREE_OPERAND (size, 1),
2293 1, max),
2294 integer_zerop (TREE_OPERAND (size, 2))
2295 ? last_size : merge_sizes (last_size, first_bit,
2296 TREE_OPERAND (size, 2),
2297 1, max));
2298
2299 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2300 when fed through SUBSTITUTE_IN_EXPR) into thinking that a constant
2301 size is not constant. */
2302 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2303 new_size = TREE_OPERAND (new_size, 0);
2304
2305 return new_size;
2306 }
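
/* Editorial example with illustrative constants: if LAST_SIZE is 32,
   FIRST_BIT is 32 and SIZE is 16, the result is 32 + 16 = 48 bits in
   the plain case, and MAX (32, 32 + 16) = 48 when MAX is requested
   because a variant part sits at offset 0.  */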
2307
2308 /* Return the bit position of FIELD, in bits from the start of the record,
2309 and fold it as much as possible. This is a tree of type bitsizetype. */
2310
2311 static tree
2312 fold_bit_position (const_tree field)
2313 {
2314 tree offset = DECL_FIELD_OFFSET (field);
2315 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2316 offset = size_binop (TREE_CODE (offset),
2317 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2318 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2319 else
2320 offset = fold_convert (bitsizetype, offset);
2321 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2322 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2323 }
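
/* Editorial example: for a field with a DECL_FIELD_OFFSET of 4 bytes
   and a DECL_FIELD_BIT_OFFSET of 3, the result is 3 + 4 * 8 = 35 bits
   from the start of the record.  */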
2324
2325 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2326 related by the addition of a constant. Return that constant if so. */
2327
2328 static tree
2329 compute_related_constant (tree op0, tree op1)
2330 {
2331 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2332
2333 if (TREE_CODE (op0) == MULT_EXPR
2334 && TREE_CODE (op1) == MULT_EXPR
2335 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2336 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2337 {
2338 factor = TREE_OPERAND (op0, 1);
2339 op0 = TREE_OPERAND (op0, 0);
2340 op1 = TREE_OPERAND (op1, 0);
2341 }
2342 else
2343 factor = NULL_TREE;
2344
2345 op0_cst = split_plus (op0, &op0_var);
2346 op1_cst = split_plus (op1, &op1_var);
2347 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2348
2349 if (operand_equal_p (op0_var, op1_var, 0))
2350 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2351
2352 return NULL_TREE;
2353 }
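
/* Editorial example: if OP0 is X + 16 and OP1 is X + 8 for the same
   variable part X, both split into X plus a constant and the function
   returns 16 - 8 = 8; if the variable parts differ, NULL_TREE is
   returned.  */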
2354
2355 /* Utility function of above to split a tree OP which may be a sum, into a
2356 constant part, which is returned, and a variable part, which is stored
2357 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2358 bitsizetype. */
2359
2360 static tree
2361 split_plus (tree in, tree *pvar)
2362 {
2363 /* Strip conversions in order to ease the tree traversal and maximize the
2364 potential for constant or plus/minus discovery. We need to be careful
2365 to always return and set *pvar to bitsizetype trees, but it's worth
2366 the effort. */
2367 in = remove_conversions (in, false);
2368
2369 *pvar = convert (bitsizetype, in);
2370
2371 if (TREE_CODE (in) == INTEGER_CST)
2372 {
2373 *pvar = bitsize_zero_node;
2374 return convert (bitsizetype, in);
2375 }
2376 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2377 {
2378 tree lhs_var, rhs_var;
2379 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2380 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2381
2382 if (lhs_var == TREE_OPERAND (in, 0)
2383 && rhs_var == TREE_OPERAND (in, 1))
2384 return bitsize_zero_node;
2385
2386 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2387 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2388 }
2389 else
2390 return bitsize_zero_node;
2391 }
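
/* Editorial example: for IN = (X + 8) - 2, the recursion sets *PVAR
   to X and returns the constant part 8 - 2 = 6; for IN = X alone,
   *PVAR is X and bitsize_zero_node is returned.  */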
2392 \f
2393 /* Return a copy of TYPE but safe to modify in any way. */
2394
2395 tree
2396 copy_type (tree type)
2397 {
2398 tree new_type = copy_node (type);
2399
2400 /* Unshare the language-specific data. */
2401 if (TYPE_LANG_SPECIFIC (type))
2402 {
2403 TYPE_LANG_SPECIFIC (new_type) = NULL;
2404 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2405 }
2406
2407 /* And the contents of the language-specific slot if needed. */
2408 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2409 && TYPE_RM_VALUES (type))
2410 {
2411 TYPE_RM_VALUES (new_type) = NULL_TREE;
2412 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2413 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2414 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2415 }
2416
2417 /* copy_node clears this field instead of copying it, because it is
2418 aliased with TREE_CHAIN. */
2419 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2420
2421 TYPE_POINTER_TO (new_type) = NULL_TREE;
2422 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2423 TYPE_MAIN_VARIANT (new_type) = new_type;
2424 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2425 TYPE_CANONICAL (new_type) = new_type;
2426
2427 return new_type;
2428 }
2429 \f
2430 /* Return a subtype of sizetype with range MIN to MAX and whose
2431 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2432 of the associated TYPE_DECL. */
2433
2434 tree
2435 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2436 {
2437 /* First build a type for the desired range. */
2438 tree type = build_nonshared_range_type (sizetype, min, max);
2439
2440 /* Then set the index type. */
2441 SET_TYPE_INDEX_TYPE (type, index);
2442 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2443
2444 return type;
2445 }
2446
2447 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2448 sizetype is used. */
2449
2450 tree
2451 create_range_type (tree type, tree min, tree max)
2452 {
2453 tree range_type;
2454
2455 if (!type)
2456 type = sizetype;
2457
2458 /* First build a type with the base range. */
2459 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2460 TYPE_MAX_VALUE (type));
2461
2462 /* Then set the actual range. */
2463 SET_TYPE_RM_MIN_VALUE (range_type, min);
2464 SET_TYPE_RM_MAX_VALUE (range_type, max);
2465
2466 return range_type;
2467 }
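
/* Editorial sketch: create_range_type (NULL_TREE, size_int (1),
   size_int (10)) builds a sizetype-based type whose base range is that
   of sizetype but whose RM bounds are 1 .. 10.  */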
2468 \f
2469 /* Return an extra subtype of TYPE with range MIN to MAX. */
2470
2471 tree
2472 create_extra_subtype (tree type, tree min, tree max)
2473 {
2474 const bool uns = TYPE_UNSIGNED (type);
2475 const unsigned prec = TYPE_PRECISION (type);
2476 tree subtype = uns ? make_unsigned_type (prec) : make_signed_type (prec);
2477
2478 TREE_TYPE (subtype) = type;
2479 TYPE_EXTRA_SUBTYPE_P (subtype) = 1;
2480
2481 SET_TYPE_RM_MIN_VALUE (subtype, min);
2482 SET_TYPE_RM_MAX_VALUE (subtype, max);
2483
2484 return subtype;
2485 }
2486 \f
2487 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2488 NAME gives the name of the type to be used in the declaration. */
2489
2490 tree
2491 create_type_stub_decl (tree name, tree type)
2492 {
2493 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2494 DECL_ARTIFICIAL (type_decl) = 1;
2495 TYPE_ARTIFICIAL (type) = 1;
2496 return type_decl;
2497 }
2498
2499 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2500 used in the declaration. ARTIFICIAL_P is true if the declaration was
2501 generated by the compiler. DEBUG_INFO_P is true if we need to write
2502 debug information about this type. GNAT_NODE is used for the position
2503 of the decl. */
2504
2505 tree
2506 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2507 Node_Id gnat_node)
2508 {
2509 enum tree_code code = TREE_CODE (type);
2510 bool is_named
2511 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2512 tree type_decl;
2513
2514 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2515 gcc_assert (!TYPE_IS_DUMMY_P (type));
2516
2517 /* If the type hasn't been named yet, we're naming it; preserve an existing
2518 TYPE_STUB_DECL that has been attached to it for some purpose. */
2519 if (!is_named && TYPE_STUB_DECL (type))
2520 {
2521 type_decl = TYPE_STUB_DECL (type);
2522 DECL_NAME (type_decl) = name;
2523 }
2524 else
2525 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2526
2527 DECL_ARTIFICIAL (type_decl) = artificial_p;
2528 TYPE_ARTIFICIAL (type) = artificial_p;
2529
2530 /* Add this decl to the current binding level. */
2531 gnat_pushdecl (type_decl, gnat_node);
2532
2533 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2534 causes the name to be also viewed as a "tag" by the debug back-end, with
2535 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2536 types in DWARF.
2537
2538 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2539 from multiple contexts, and "type_decl" references a copy of it: in such a
2540 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2541 with the mechanism above. */
2542 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2543 TYPE_STUB_DECL (type) = type_decl;
2544
2545 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2546 back-end doesn't support, and for others if we don't need to. */
2547 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2548 DECL_IGNORED_P (type_decl) = 1;
2549
2550 return type_decl;
2551 }
2552 \f
2553 /* Return a VAR_DECL or CONST_DECL node.
2554
2555 NAME gives the name of the variable. ASM_NAME is its assembler name
2556 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2557 the GCC tree for an optional initial expression; NULL_TREE if none.
2558
2559 CONST_FLAG is true if this variable is constant, in which case we might
2560 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2561
2562 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2563 definition to be made visible outside of the current compilation unit, for
2564 instance, variable definitions in a package specification.
2565
2566 EXTERN_FLAG is true when processing an external variable declaration (as
2567 opposed to a definition: no storage is to be allocated for the variable).
2568
2569 STATIC_FLAG is only relevant when not at top level and indicates whether
2570 to always allocate storage to the variable.
2571
2572 VOLATILE_FLAG is true if this variable is declared as volatile.
2573
2574 ARTIFICIAL_P is true if the variable was generated by the compiler.
2575
2576 DEBUG_INFO_P is true if we need to write debug information for it.
2577
2578 ATTR_LIST is the list of attributes to be attached to the variable.
2579
2580 GNAT_NODE is used for the position of the decl. */
2581
2582 tree
2583 create_var_decl (tree name, tree asm_name, tree type, tree init,
2584 bool const_flag, bool public_flag, bool extern_flag,
2585 bool static_flag, bool volatile_flag, bool artificial_p,
2586 bool debug_info_p, struct attrib *attr_list,
2587 Node_Id gnat_node, bool const_decl_allowed_p)
2588 {
2589 /* Whether the object has static storage duration, either explicitly or by
2590 virtue of being declared at the global level. */
2591 const bool static_storage = static_flag || global_bindings_p ();
2592
2593 /* Whether the initializer is constant: for an external object or an object
2594 with static storage duration, we check that the initializer is a valid
2595 constant expression for initializing a static variable; otherwise, we
2596 only check that it is constant. */
2597 const bool init_const
2598 = (init
2599 && gnat_types_compatible_p (type, TREE_TYPE (init))
2600 && (extern_flag || static_storage
2601 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2602 != NULL_TREE
2603 : TREE_CONSTANT (init)));
2604
2605 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2606 case the initializer may be used in lieu of the DECL node (as done in
2607 Identifier_to_gnu). This is useful to prevent the need of elaboration
2608 code when an identifier for which such a DECL is made is in turn used
2609 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2610 but extra constraints apply to this choice (see below) and they are not
2611 relevant to the distinction we wish to make. */
2612 const bool constant_p = const_flag && init_const;
2613
2614 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2615 and may be used for scalars in general but not for aggregates. */
2616 tree var_decl
2617 = build_decl (input_location,
2618 (constant_p
2619 && const_decl_allowed_p
2620 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2621 name, type);
2622
2623 /* Detect constants created by the front-end to hold 'reference to function
2624 calls for stabilization purposes. This is needed for renaming. */
2625 if (const_flag && init && POINTER_TYPE_P (type))
2626 {
2627 tree inner = init;
2628 if (TREE_CODE (inner) == COMPOUND_EXPR)
2629 inner = TREE_OPERAND (inner, 1);
2630 inner = remove_conversions (inner, true);
2631 if (TREE_CODE (inner) == ADDR_EXPR
2632 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2633 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2634 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2635 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2636 DECL_RETURN_VALUE_P (var_decl) = 1;
2637 }
2638
2639 /* If this is external, throw away any initializations (they will be done
2640 elsewhere) unless this is a constant for which we would like to remain
2641 able to get the initializer. If we are defining a global here, leave a
2642 constant initialization and save any variable elaborations for the
2643 elaboration routine. If we are just annotating types, throw away the
2644 initialization if it isn't a constant. */
2645 if ((extern_flag && !constant_p)
2646 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2647 init = NULL_TREE;
2648
2649 /* At the global level, a non-constant initializer generates elaboration
2650 statements. Check that such statements are allowed, that is to say,
2651 not violating a No_Elaboration_Code restriction. */
2652 if (init && !init_const && global_bindings_p ())
2653 Check_Elaboration_Code_Allowed (gnat_node);
2654
2655 /* Attach the initializer, if any. */
2656 DECL_INITIAL (var_decl) = init;
2657
2658 /* Directly set some flags. */
2659 DECL_ARTIFICIAL (var_decl) = artificial_p;
2660 DECL_EXTERNAL (var_decl) = extern_flag;
2661
2662 TREE_CONSTANT (var_decl) = constant_p;
2663 TREE_READONLY (var_decl) = const_flag;
2664
2665 /* The object is public if it is external or if it is declared public
2666 and has static storage duration. */
2667 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2668
2669 /* We need to allocate static storage for an object with static storage
2670 duration if it isn't external. */
2671 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2672
2673 TREE_SIDE_EFFECTS (var_decl)
2674 = TREE_THIS_VOLATILE (var_decl)
2675 = TYPE_VOLATILE (type) | volatile_flag;
2676
2677 if (TREE_SIDE_EFFECTS (var_decl))
2678 TREE_ADDRESSABLE (var_decl) = 1;
2679
2680 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2681 try to fiddle with DECL_COMMON. However, on platforms that don't
2682 support global BSS sections, uninitialized global variables would
2683 go in DATA instead, thus increasing the size of the executable. */
2684 if (!flag_no_common
2685 && TREE_CODE (var_decl) == VAR_DECL
2686 && TREE_PUBLIC (var_decl)
2687 && !have_global_bss_p ())
2688 DECL_COMMON (var_decl) = 1;
2689
2690 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2691 since we will create an associated variable. Likewise for an external
2692 constant whose initializer is not absolute, because this would mean a
2693 global relocation in a read-only section which runs afoul of the PE-COFF
2694 run-time relocation mechanism. */
2695 if (!debug_info_p
2696 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2697 || (extern_flag
2698 && constant_p
2699 && init
2700 && initializer_constant_valid_p (init, TREE_TYPE (init))
2701 != null_pointer_node))
2702 DECL_IGNORED_P (var_decl) = 1;
2703
2704 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2705 if (TREE_CODE (var_decl) == VAR_DECL)
2706 process_attributes (&var_decl, &attr_list, true, gnat_node);
2707
2708 /* Add this decl to the current binding level. */
2709 gnat_pushdecl (var_decl, gnat_node);
2710
2711 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2712 {
2713 /* Let the target mangle the name if this isn't a verbatim asm. */
2714 if (*IDENTIFIER_POINTER (asm_name) != '*')
2715 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2716
2717 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2718 }
2719
2720 return var_decl;
2721 }
2722 \f
2723 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2724
2725 static bool
2726 aggregate_type_contains_array_p (tree type)
2727 {
2728 switch (TREE_CODE (type))
2729 {
2730 case RECORD_TYPE:
2731 case UNION_TYPE:
2732 case QUAL_UNION_TYPE:
2733 {
2734 tree field;
2735 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2736 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2737 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2738 return true;
2739 return false;
2740 }
2741
2742 case ARRAY_TYPE:
2743 return true;
2744
2745 default:
2746 gcc_unreachable ();
2747 }
2748 }
2749
2750 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2751 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2752 is the specified size of the field. If POS is nonzero, it is the bit
2753 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2754 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2755 means we are allowed to take the address of the field; if it is negative,
2756 we should not make a bitfield, which is used by make_aligning_type. */
2757
2758 tree
2759 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2760 int packed, int addressable)
2761 {
2762 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2763
2764 DECL_CONTEXT (field_decl) = record_type;
2765 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2766
2767 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2768 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2769 Likewise for an aggregate without specified position that contains an
2770 array, because in this case slices of variable length of this array
2771 must be handled by GCC and variable-sized objects need to be aligned
2772 to at least a byte boundary. */
2773 if (packed && (TYPE_MODE (type) == BLKmode
2774 || (!pos
2775 && AGGREGATE_TYPE_P (type)
2776 && aggregate_type_contains_array_p (type))))
2777 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2778
2779 /* If a size is specified, use it. Otherwise, if the record type is packed
2780 compute a size to use, which may differ from the object's natural size.
2781 We always set a size in this case to trigger the checks for bitfield
2782 creation below, which is typically required when no position has been
2783 specified. */
2784 if (size)
2785 size = convert (bitsizetype, size);
2786 else if (packed == 1)
2787 {
2788 size = rm_size (type);
2789 if (TYPE_MODE (type) == BLKmode)
2790 size = round_up (size, BITS_PER_UNIT);
2791 }
2792
2793 /* If we may, according to ADDRESSABLE, make a bitfield when the size is
2794 specified for two reasons: first if the size differs from the natural
2795 size; second, if the alignment is insufficient. There are a number of
2796 ways the latter can be true.
2797
2798 We never make a bitfield if the type of the field has a nonconstant size,
2799 because no such entity requiring bitfield operations should reach here.
2800
2801 We do *preventively* make a bitfield when there might be the need for it
2802 but we don't have all the necessary information to decide, as is the case
2803 of a field in a packed record.
2804
2805 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2806 in layout_decl or finish_record_type to clear the bit_field indication if
2807 it is in fact not needed. */
2808 if (addressable >= 0
2809 && size
2810 && TREE_CODE (size) == INTEGER_CST
2811 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2812 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2813 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2814 || packed
2815 || (TYPE_ALIGN (record_type) != 0
2816 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2817 {
2818 DECL_BIT_FIELD (field_decl) = 1;
2819 DECL_SIZE (field_decl) = size;
2820 if (!packed && !pos)
2821 {
2822 if (TYPE_ALIGN (record_type) != 0
2823 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2824 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2825 else
2826 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2827 }
2828 }
2829
2830 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2831
2832 /* Bump the alignment if need be, either for bitfield/packing purposes or
2833 to satisfy the type requirements if no such consideration applies. When
2834 we get the alignment from the type, indicate if this is from an explicit
2835 user request, which prevents stor-layout from lowering it later on. */
2836 {
2837 unsigned int bit_align
2838 = (DECL_BIT_FIELD (field_decl) ? 1
2839 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2840
2841 if (bit_align > DECL_ALIGN (field_decl))
2842 SET_DECL_ALIGN (field_decl, bit_align);
2843 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2844 {
2845 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2846 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2847 }
2848 }
2849
2850 if (pos)
2851 {
2852 /* We need to pass in the alignment the DECL is known to have.
2853 This is the lowest-order bit set in POS, but no more than
2854 the alignment of the record, if one is specified. Note
2855 that an alignment of 0 is taken as infinite. */
2856 unsigned int known_align;
2857
2858 if (tree_fits_uhwi_p (pos))
2859 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2860 else
2861 known_align = BITS_PER_UNIT;
2862
2863 if (TYPE_ALIGN (record_type)
2864 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2865 known_align = TYPE_ALIGN (record_type);
2866
2867 layout_decl (field_decl, known_align);
2868 SET_DECL_OFFSET_ALIGN (field_decl,
2869 tree_fits_uhwi_p (pos)
2870 ? BIGGEST_ALIGNMENT : BITS_PER_UNIT);
2871 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2872 &DECL_FIELD_BIT_OFFSET (field_decl),
2873 DECL_OFFSET_ALIGN (field_decl), pos);
2874 }
2875
2876 /* In addition to what our caller says, claim the field is addressable if we
2877 know that its type is not suitable.
2878
2879 The field may also be "technically" nonaddressable, meaning that even if
2880 we attempt to take the field's address we will actually get the address
2881 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2882 value we have at this point is not accurate enough, so we don't account
2883 for this here and let finish_record_type decide. */
2884 if (!addressable && !type_for_nonaliased_component_p (type))
2885 addressable = 1;
2886
2887 /* Note that there is a trade-off in making a field nonaddressable because
2888 this will cause type-based alias analysis to use the same alias set for
2889 accesses to the field as for accesses to the whole record: while doing
2890 so will make it more likely to disambiguate accesses to other objects
2891 and accesses to the field, it will make it less likely to disambiguate
2892 accesses to the other fields of the record and accesses to the field.
2893 If the record is fully static, then the trade-off is irrelevant since
2894 the fields of the record can always be disambiguated by their offsets
2895 but, if the record is dynamic, then it can become problematic. */
2896 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2897
2898 return field_decl;
2899 }
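
/* Editorial example: in a packed record, a component of a 32-bit
   integer type subject to a Size clause of 24 bits gets DECL_BIT_FIELD
   set here because the size differs from the natural size; layout_decl
   or finish_record_type may later clear the indication if it turns out
   not to be needed, as mentioned above.  */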
2900 \f
2901 /* Return a PARM_DECL node with NAME and TYPE. */
2902
2903 tree
2904 create_param_decl (tree name, tree type)
2905 {
2906 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2907
2908 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2909 can lead to various ABI violations. */
2910 if (targetm.calls.promote_prototypes (NULL_TREE)
2911 && INTEGRAL_TYPE_P (type)
2912 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2913 {
2914 /* We have to be careful about biased types here. Make a subtype
2915 of integer_type_node with the proper biasing. */
2916 if (TREE_CODE (type) == INTEGER_TYPE
2917 && TYPE_BIASED_REPRESENTATION_P (type))
2918 {
2919 tree subtype
2920 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2921 TREE_TYPE (subtype) = integer_type_node;
2922 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2923 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2924 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2925 type = subtype;
2926 }
2927 else
2928 type = integer_type_node;
2929 }
2930
2931 DECL_ARG_TYPE (param_decl) = type;
2932 return param_decl;
2933 }
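
/* Editorial example: a biased subtype with range 100 .. 103 is stored
   as Value - 100 on 2 bits; when promoted for parameter passing, the
   code above builds an integer-sized subtype that keeps the biased
   representation and the original bounds, rather than using a plain
   integer which would reinterpret the stored values.  */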
2934 \f
2935 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2936 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2937 changed. GNAT_NODE is used for the position of error messages. */
2938
2939 void
2940 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2941 Node_Id gnat_node)
2942 {
2943 struct attrib *attr;
2944
2945 for (attr = *attr_list; attr; attr = attr->next)
2946 switch (attr->type)
2947 {
2948 case ATTR_MACHINE_ATTRIBUTE:
2949 Sloc_to_locus (Sloc (gnat_node), &input_location);
2950 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2951 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2952 break;
2953
2954 case ATTR_LINK_ALIAS:
2955 if (!DECL_EXTERNAL (*node))
2956 {
2957 TREE_STATIC (*node) = 1;
2958 assemble_alias (*node, attr->name);
2959 }
2960 break;
2961
2962 case ATTR_WEAK_EXTERNAL:
2963 if (SUPPORTS_WEAK)
2964 declare_weak (*node);
2965 else
2966 post_error ("?weak declarations not supported on this target",
2967 attr->error_point);
2968 break;
2969
2970 case ATTR_LINK_SECTION:
2971 if (targetm_common.have_named_sections)
2972 {
2973 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2974 DECL_COMMON (*node) = 0;
2975 }
2976 else
2977 post_error ("?section attributes are not supported for this target",
2978 attr->error_point);
2979 break;
2980
2981 case ATTR_LINK_CONSTRUCTOR:
2982 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2983 TREE_USED (*node) = 1;
2984 break;
2985
2986 case ATTR_LINK_DESTRUCTOR:
2987 DECL_STATIC_DESTRUCTOR (*node) = 1;
2988 TREE_USED (*node) = 1;
2989 break;
2990
2991 case ATTR_THREAD_LOCAL_STORAGE:
2992 set_decl_tls_model (*node, decl_default_tls_model (*node));
2993 DECL_COMMON (*node) = 0;
2994 break;
2995 }
2996
2997 *attr_list = NULL;
2998 }
2999
3000 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
3001 a power of 2. */
3002
3003 bool
3004 value_factor_p (tree value, HOST_WIDE_INT factor)
3005 {
3006 if (tree_fits_uhwi_p (value))
3007 return tree_to_uhwi (value) % factor == 0;
3008
3009 if (TREE_CODE (value) == MULT_EXPR)
3010 return (value_factor_p (TREE_OPERAND (value, 0), factor)
3011 || value_factor_p (TREE_OPERAND (value, 1), factor));
3012
3013 return false;
3014 }
3015
3016 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling it
3017 to be set once GNAT_SCOPE is elaborated. */
3018
3019 static struct deferred_decl_context_node *
3020 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
3021 {
3022 struct deferred_decl_context_node *new_node;
3023
3024 new_node
3025 = (struct deferred_decl_context_node *) xmalloc (sizeof (*new_node));
3026 new_node->decl = decl;
3027 new_node->gnat_scope = gnat_scope;
3028 new_node->force_global = force_global;
3029 new_node->types.create (1);
3030 new_node->next = deferred_decl_context_queue;
3031 deferred_decl_context_queue = new_node;
3032 return new_node;
3033 }
3034
3035 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling it
3036 to be set to the DECL_CONTEXT computed as part of N as soon as the
3037 latter is available. */
3038
3039 static void
3040 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
3041 {
3042 n->types.safe_push (type);
3043 }
3044
3045 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
3046 NULL_TREE if it is not available. */
3047
3048 static tree
3049 compute_deferred_decl_context (Entity_Id gnat_scope)
3050 {
3051 tree context;
3052
3053 if (present_gnu_tree (gnat_scope))
3054 context = get_gnu_tree (gnat_scope);
3055 else
3056 return NULL_TREE;
3057
3058 if (TREE_CODE (context) == TYPE_DECL)
3059 {
3060 const tree context_type = TREE_TYPE (context);
3061
3062 /* Skip dummy types: only the final ones can appear in the context
3063 chain. */
3064 if (TYPE_DUMMY_P (context_type))
3065 return NULL_TREE;
3066
3067 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
3068 chain. */
3069 else
3070 context = context_type;
3071 }
3072
3073 return context;
3074 }
3075
3076 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
3077 that cannot be processed yet, remove the other ones. If FORCE is true,
3078 force the processing of all nodes, using the global context when nodes
3079 don't have a GNU translation. */
3080
3081 void
3082 process_deferred_decl_context (bool force)
3083 {
3084 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
3085 struct deferred_decl_context_node *node;
3086
3087 while (*it)
3088 {
3089 bool processed = false;
3090 tree context = NULL_TREE;
3091 Entity_Id gnat_scope;
3092
3093 node = *it;
3094
3095 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3096 get the first scope. */
3097 gnat_scope = node->gnat_scope;
3098 while (Present (gnat_scope))
3099 {
3100 context = compute_deferred_decl_context (gnat_scope);
3101 if (!force || context)
3102 break;
3103 gnat_scope = get_debug_scope (gnat_scope, NULL);
3104 }
3105
3106 /* Imported declarations must not be in a local context (i.e. not inside
3107 a function). */
3108 if (context && node->force_global > 0)
3109 {
3110 tree ctx = context;
3111
3112 while (ctx)
3113 {
3114 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3115 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3116 }
3117 }
3118
3119 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3120 was no elaborated scope, use the global context. */
3121 if (force && !context)
3122 context = get_global_context ();
3123
3124 if (context)
3125 {
3126 tree t;
3127 int i;
3128
3129 DECL_CONTEXT (node->decl) = context;
3130
3131 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3132 ..._TYPE nodes. */
3133 FOR_EACH_VEC_ELT (node->types, i, t)
3134 {
3135 gnat_set_type_context (t, context);
3136 }
3137 processed = true;
3138 }
3139
3140 /* If this node has been successfully processed, remove it from the
3141 queue. Then move to the next node. */
3142 if (processed)
3143 {
3144 *it = node->next;
3145 node->types.release ();
3146 free (node);
3147 }
3148 else
3149 it = &node->next;
3150 }
3151 }
3152
3153 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
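/* A worked example on a hypothetical expression: if EXPR is
   (N & -8) + 24 and VALUE is 2, the mask -8 has 3 trailing zero bits,
   giving a factor of 8; the addend 24 is a multiple of 8, so the result
   is 8 * 2 = 16. Had the addend been 20 instead, the factor would have
   been reset to 1 and the result would be 2. */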
3154
3155 static unsigned int
3156 scale_by_factor_of (tree expr, unsigned int value)
3157 {
3158 unsigned HOST_WIDE_INT addend = 0;
3159 unsigned HOST_WIDE_INT factor = 1;
3160
3161 /* Peel conversions around EXPR and try to extract bodies from function
3162 calls: it is possible to get the scale factor from size functions. */
3163 expr = remove_conversions (expr, true);
3164 if (TREE_CODE (expr) == CALL_EXPR)
3165 expr = maybe_inline_call_in_expr (expr);
3166
3167 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3168 multiple of the scale factor we are looking for. */
3169 if (TREE_CODE (expr) == PLUS_EXPR
3170 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3171 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3172 {
3173 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3174 expr = TREE_OPERAND (expr, 0);
3175 }
3176
3177 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3178 corresponding to the number of trailing zeros of the mask. */
3179 if (TREE_CODE (expr) == BIT_AND_EXPR
3180 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3181 {
3182 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3183 unsigned int i = 0;
3184
3185 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3186 {
3187 mask >>= 1;
3188 factor *= 2;
3189 i++;
3190 }
3191 }
3192
3193 /* If the addend is not a multiple of the factor we found, give up. In
3194 theory we could find a smaller common factor but it's useless for our
3195 needs. This situation arises when dealing with a field F1 with no
3196 alignment requirement but that is following a field F2 with such
3197 requirements. As long as we have F2's offset, we don't need alignment
3198 information to compute F1's. */
3199 if (addend % factor != 0)
3200 factor = 1;
3201
3202 return factor * value;
3203 }
3204
3205 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3206 unless we can prove these two fields are laid out in such a way that no gap
3207 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3208 is the distance in bits between the end of PREV_FIELD and the starting
3209 position of CURR_FIELD. It is ignored if null. */
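/* For instance, with a null OFFSET (illustrative figures): if
   PREV_FIELD is a 24-bit field at bit position 0 and CURR_FIELD
   requires 16-bit alignment, 0 + 24 is not a multiple of 16, so the
   function returns true to signal a potential gap. */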
3210
3211 static bool
3212 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3213 {
3214 /* If this is the first field of the record, there cannot be any gap. */
3215 if (!prev_field)
3216 return false;
3217
3218 /* If the previous field is a union type, then return false: The only
3219 time when such a field is not the last field of the record is when
3220 there are other components at fixed positions after it (meaning there
3221 was a rep clause for every field), in which case we don't want the
3222 alignment constraint to override them. */
3223 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3224 return false;
3225
3226 /* If the distance between the end of prev_field and the beginning of
3227 curr_field is constant, then there is a gap if the value of this
3228 constant is not null. */
3229 if (offset && tree_fits_uhwi_p (offset))
3230 return !integer_zerop (offset);
3231
3232 /* If the size and position of the previous field are constant,
3233 then check the sum of this size and position. There will be a gap
3234 iff it is not a multiple of the current field alignment. */
3235 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3236 && tree_fits_uhwi_p (bit_position (prev_field)))
3237 return ((tree_to_uhwi (bit_position (prev_field))
3238 + tree_to_uhwi (DECL_SIZE (prev_field)))
3239 % DECL_ALIGN (curr_field) != 0);
3240
3241 /* If both the position and size of the previous field are multiples
3242 of the current field alignment, there cannot be any gap. */
3243 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3244 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3245 return false;
3246
3247 /* Fall back to returning that there may be a potential gap. */
3248 return true;
3249 }
3250
3251 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3252 the decl. */
3253
3254 tree
3255 create_label_decl (tree name, Node_Id gnat_node)
3256 {
3257 tree label_decl
3258 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3259
3260 SET_DECL_MODE (label_decl, VOIDmode);
3261
3262 /* Add this decl to the current binding level. */
3263 gnat_pushdecl (label_decl, gnat_node);
3264
3265 return label_decl;
3266 }
3267 \f
3268 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3269 its assembler name, TYPE its type (a FUNCTION_TYPE or METHOD_TYPE node),
3270 PARAM_DECL_LIST the list of its parameters (a list of PARM_DECL nodes
3271 chained through the DECL_CHAIN field).
3272
3273 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3274
3275 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3276 definition to be made visible outside of the current compilation unit.
3277
3278 EXTERN_FLAG is true when processing an external subprogram declaration.
3279
3280 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3281
3282 DEBUG_INFO_P is true if we need to write debug information for it.
3283
3284 DEFINITION is true if the subprogram is to be considered as a definition.
3285
3286 ATTR_LIST is the list of attributes to be attached to the subprogram.
3287
3288 GNAT_NODE is used for the position of the decl. */
3289
3290 tree
3291 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3292 enum inline_status_t inline_status, bool public_flag,
3293 bool extern_flag, bool artificial_p, bool debug_info_p,
3294 bool definition, struct attrib *attr_list,
3295 Node_Id gnat_node)
3296 {
3297 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3298 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3299
3300 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3301 DECL_EXTERNAL (subprog_decl) = extern_flag;
3302 DECL_FUNCTION_IS_DEF (subprog_decl) = definition;
3303 DECL_IGNORED_P (subprog_decl) = !debug_info_p;
3304 TREE_PUBLIC (subprog_decl) = public_flag;
3305
3306 switch (inline_status)
3307 {
3308 case is_suppressed:
3309 DECL_UNINLINABLE (subprog_decl) = 1;
3310 break;
3311
3312 case is_default:
3313 break;
3314
3315 case is_required:
3316 if (Back_End_Inlining)
3317 {
3318 decl_attributes (&subprog_decl,
3319 tree_cons (get_identifier ("always_inline"),
3320 NULL_TREE, NULL_TREE),
3321 ATTR_FLAG_TYPE_IN_PLACE);
3322
3323 /* Inline_Always guarantees that every direct call is inlined and
3324 that there is no indirect reference to the subprogram, so the
3325 instance in the original package (as well as its clones in the
3326 client packages created for inter-unit inlining) can be made
3327 private, which causes the out-of-line body to be eliminated. */
3328 TREE_PUBLIC (subprog_decl) = 0;
3329 }
3330
3331 /* ... fall through ... */
3332
3333 case is_prescribed:
3334 DECL_DISREGARD_INLINE_LIMITS (subprog_decl) = 1;
3335
3336 /* ... fall through ... */
3337
3338 case is_requested:
3339 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3340 if (!Debug_Generated_Code)
3341 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3342 break;
3343
3344 default:
3345 gcc_unreachable ();
3346 }
3347
3348 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3349
3350 /* Once everything is processed, finish the subprogram declaration. */
3351 finish_subprog_decl (subprog_decl, asm_name, type);
3352
3353 /* Add this decl to the current binding level. */
3354 gnat_pushdecl (subprog_decl, gnat_node);
3355
3356 /* Output the assembler code and/or RTL for the declaration. */
3357 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3358
3359 return subprog_decl;
3360 }
3361
3362 /* Given a subprogram declaration DECL, its assembler name and its type,
3363 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3364
3365 void
3366 finish_subprog_decl (tree decl, tree asm_name, tree type)
3367 {
3368 tree result_decl
3369 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3370 TREE_TYPE (type));
3371
3372 DECL_ARTIFICIAL (result_decl) = 1;
3373 DECL_IGNORED_P (result_decl) = 1;
3374 DECL_CONTEXT (result_decl) = decl;
3375 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3376 DECL_RESULT (decl) = result_decl;
3377
3378 /* Propagate the "const" property. */
3379 TREE_READONLY (decl) = TYPE_READONLY (type);
3380
3381 /* Propagate the "pure" property. */
3382 DECL_PURE_P (decl) = TYPE_RESTRICT (type);
3383
3384 /* Propagate the "noreturn" property. */
3385 TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3386
3387 if (asm_name)
3388 {
3389 /* Let the target mangle the name if this isn't a verbatim asm. */
3390 if (*IDENTIFIER_POINTER (asm_name) != '*')
3391 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3392
3393 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3394
3395 /* The expand_main_function circuitry expects "main_identifier_node" to
3396 designate the DECL_NAME of the 'main' entry point, in turn expected
3397 to be declared as the "main" function literally by default. Ada
3398 program entry points are typically declared with a different name
3399 within the binder generated file, exported as 'main' to satisfy the
3400 system expectations. Force main_identifier_node in this case. */
3401 if (asm_name == main_identifier_node)
3402 DECL_NAME (decl) = main_identifier_node;
3403 }
3404 }
3405 \f
3406 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3407 body. This routine needs to be invoked before processing the declarations
3408 appearing in the subprogram. */
3409
3410 void
3411 begin_subprog_body (tree subprog_decl)
3412 {
3413 tree param_decl;
3414
3415 announce_function (subprog_decl);
3416
3417 /* This function is being defined. */
3418 TREE_STATIC (subprog_decl) = 1;
3419
3420 /* The failure of this assertion will likely come from a wrong context for
3421 the subprogram body, e.g. another procedure for a procedure declared at
3422 library level. */
3423 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3424
3425 current_function_decl = subprog_decl;
3426
3427 /* Enter a new binding level and show that all the parameters belong to
3428 this function. */
3429 gnat_pushlevel ();
3430
3431 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3432 param_decl = DECL_CHAIN (param_decl))
3433 DECL_CONTEXT (param_decl) = subprog_decl;
3434 }
3435
3436 /* Finish translating the current subprogram and set its BODY. */
3437
3438 void
3439 end_subprog_body (tree body)
3440 {
3441 tree fndecl = current_function_decl;
3442
3443 /* Attach the BLOCK for this level to the function and pop the level. */
3444 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3445 DECL_INITIAL (fndecl) = current_binding_level->block;
3446 gnat_poplevel ();
3447
3448 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3449 if (TREE_CODE (body) == BIND_EXPR)
3450 {
3451 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3452 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3453 }
3454
3455 DECL_SAVED_TREE (fndecl) = body;
3456
3457 current_function_decl = decl_function_context (fndecl);
3458 }
3459
3460 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3461
3462 void
3463 rest_of_subprog_body_compilation (tree subprog_decl)
3464 {
3465 /* We cannot track the location of errors past this point. */
3466 Current_Error_Node = Empty;
3467
3468 /* If we're only annotating types, don't actually compile this function. */
3469 if (type_annotate_only)
3470 return;
3471
3472 /* Dump functions before gimplification. */
3473 dump_function (TDI_original, subprog_decl);
3474
3475 if (!decl_function_context (subprog_decl))
3476 cgraph_node::finalize_function (subprog_decl, false);
3477 else
3478 /* Register this function with cgraph just far enough to get it
3479 added to our parent's nested function list. */
3480 (void) cgraph_node::get_create (subprog_decl);
3481 }
3482
3483 tree
3484 gnat_builtin_function (tree decl)
3485 {
3486 gnat_pushdecl (decl, Empty);
3487 return decl;
3488 }
3489
3490 /* Return an integer type with the number of bits of precision given by
3491 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3492 it is a signed type. */
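/* For instance, gnat_type_for_size (32, 0) would return a 32-bit signed
   type named SIGNED_32 and gnat_type_for_size (8, 1) an 8-bit unsigned
   type named UNSIGNED_8 (illustrative usage; an equivalent type may be
   returned from the cache instead). */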
3493
3494 tree
3495 gnat_type_for_size (unsigned precision, int unsignedp)
3496 {
3497 tree t;
3498 char type_name[20];
3499
3500 if (precision <= 2 * MAX_BITS_PER_WORD
3501 && signed_and_unsigned_types[precision][unsignedp])
3502 return signed_and_unsigned_types[precision][unsignedp];
3503
3504 if (unsignedp)
3505 t = make_unsigned_type (precision);
3506 else
3507 t = make_signed_type (precision);
3508 TYPE_ARTIFICIAL (t) = 1;
3509
3510 if (precision <= 2 * MAX_BITS_PER_WORD)
3511 signed_and_unsigned_types[precision][unsignedp] = t;
3512
3513 if (!TYPE_NAME (t))
3514 {
3515 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3516 TYPE_NAME (t) = get_identifier (type_name);
3517 }
3518
3519 return t;
3520 }
3521
3522 /* Likewise for floating-point types. */
3523
3524 static tree
3525 float_type_for_precision (int precision, machine_mode mode)
3526 {
3527 tree t;
3528 char type_name[20];
3529
3530 if (float_types[(int) mode])
3531 return float_types[(int) mode];
3532
3533 float_types[(int) mode] = t = make_node (REAL_TYPE);
3534 TYPE_PRECISION (t) = precision;
3535 layout_type (t);
3536
3537 gcc_assert (TYPE_MODE (t) == mode);
3538 if (!TYPE_NAME (t))
3539 {
3540 sprintf (type_name, "FLOAT_%d", precision);
3541 TYPE_NAME (t) = get_identifier (type_name);
3542 }
3543
3544 return t;
3545 }
3546
3547 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3548 an unsigned type; otherwise a signed type is returned. */
3549
3550 tree
3551 gnat_type_for_mode (machine_mode mode, int unsignedp)
3552 {
3553 if (mode == BLKmode)
3554 return NULL_TREE;
3555
3556 if (mode == VOIDmode)
3557 return void_type_node;
3558
3559 if (COMPLEX_MODE_P (mode))
3560 return NULL_TREE;
3561
3562 scalar_float_mode float_mode;
3563 if (is_a <scalar_float_mode> (mode, &float_mode))
3564 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3565 float_mode);
3566
3567 scalar_int_mode int_mode;
3568 if (is_a <scalar_int_mode> (mode, &int_mode))
3569 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3570
3571 if (VECTOR_MODE_P (mode))
3572 {
3573 machine_mode inner_mode = GET_MODE_INNER (mode);
3574 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3575 if (inner_type)
3576 return build_vector_type_for_mode (inner_type, mode);
3577 }
3578
3579 return NULL_TREE;
3580 }
3581
3582 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3583 signedness being specified by UNSIGNEDP. */
3584
3585 tree
3586 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3587 {
3588 if (type_node == char_type_node)
3589 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3590
3591 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3592
3593 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3594 {
3595 type = copy_type (type);
3596 TREE_TYPE (type) = type_node;
3597 }
3598 else if (TREE_TYPE (type_node)
3599 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3600 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3601 {
3602 type = copy_type (type);
3603 TREE_TYPE (type) = TREE_TYPE (type_node);
3604 }
3605
3606 return type;
3607 }
3608
3609 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3610 transparently converted to each other. */
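/* For instance (illustrative case): two constrained array types with
   the same bounds, the same component type and the same scalar storage
   order are compatible in this sense, even if they come from distinct
   type declarations. */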
3611
3612 int
3613 gnat_types_compatible_p (tree t1, tree t2)
3614 {
3615 enum tree_code code;
3616
3617 /* This is the default criterion. */
3618 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3619 return 1;
3620
3621 /* We only check structural equivalence here. */
3622 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3623 return 0;
3624
3625 /* Vector types are also compatible if they have the same number of subparts
3626 and the same form of (scalar) element type. */
3627 if (code == VECTOR_TYPE
3628 && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
3629 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3630 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3631 return 1;
3632
3633 /* Array types are also compatible if they are constrained and have the same
3634 domain(s), the same component type and the same scalar storage order. */
3635 if (code == ARRAY_TYPE
3636 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3637 || (TYPE_DOMAIN (t1)
3638 && TYPE_DOMAIN (t2)
3639 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3640 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3641 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3642 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3643 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3644 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3645 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3646 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3647 return 1;
3648
3649 return 0;
3650 }
3651
3652 /* Return true if EXPR is a useless type conversion. */
3653
3654 bool
3655 gnat_useless_type_conversion (tree expr)
3656 {
3657 if (CONVERT_EXPR_P (expr)
3658 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3659 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3660 return gnat_types_compatible_p (TREE_TYPE (expr),
3661 TREE_TYPE (TREE_OPERAND (expr, 0)));
3662
3663 return false;
3664 }
3665
3666 /* Return true if T, a {FUNCTION,METHOD}_TYPE, has the specified flags. */
3667
3668 bool
3669 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3670 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3671 {
3672 return TYPE_CI_CO_LIST (t) == cico_list
3673 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3674 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3675 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3676 }
3677 \f
3678 /* EXP is an expression for the size of an object. If this size contains
3679 discriminant references, replace them with the maximum (if MAX_P) or
3680 minimum (if !MAX_P) possible value of the discriminant.
3681
3682 Note that the expression may have already been gimplified, in which case
3683 COND_EXPRs have VOID_TYPE and no operands, and this must be handled. */
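/* As an example on a hypothetical type: for a record whose size is
   8 + D * 16 bits, where the discriminant D ranges over 0 .. 10,
   calling max_size with MAX_P true would yield 8 + 10 * 16 = 168, i.e.
   the size obtained by substituting the maximum value of D. */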
3684
3685 tree
3686 max_size (tree exp, bool max_p)
3687 {
3688 enum tree_code code = TREE_CODE (exp);
3689 tree type = TREE_TYPE (exp);
3690 tree op0, op1, op2;
3691
3692 switch (TREE_CODE_CLASS (code))
3693 {
3694 case tcc_declaration:
3695 case tcc_constant:
3696 return exp;
3697
3698 case tcc_exceptional:
3699 gcc_assert (code == SSA_NAME);
3700 return exp;
3701
3702 case tcc_vl_exp:
3703 if (code == CALL_EXPR)
3704 {
3705 tree t, *argarray;
3706 int n, i;
3707
3708 t = maybe_inline_call_in_expr (exp);
3709 if (t)
3710 return max_size (t, max_p);
3711
3712 n = call_expr_nargs (exp);
3713 gcc_assert (n > 0);
3714 argarray = XALLOCAVEC (tree, n);
3715 for (i = 0; i < n; i++)
3716 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3717 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3718 }
3719 break;
3720
3721 case tcc_reference:
3722 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3723 modify. Otherwise, we treat it like a variable. */
3724 if (CONTAINS_PLACEHOLDER_P (exp))
3725 {
3726 tree base_type = get_base_type (TREE_TYPE (TREE_OPERAND (exp, 1)));
3727 tree val
3728 = fold_convert (base_type,
3729 max_p
3730 ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3731
3732 /* Walk down the extra subtypes to get more restrictive bounds. */
3733 while (TYPE_IS_EXTRA_SUBTYPE_P (type))
3734 {
3735 type = TREE_TYPE (type);
3736 if (max_p)
3737 val = fold_build2 (MIN_EXPR, base_type, val,
3738 fold_convert (base_type,
3739 TYPE_MAX_VALUE (type)));
3740 else
3741 val = fold_build2 (MAX_EXPR, base_type, val,
3742 fold_convert (base_type,
3743 TYPE_MIN_VALUE (type)));
3744 }
3745
3746 return fold_convert (type, max_size (val, max_p));
3747 }
3748
3749 return exp;
3750
3751 case tcc_comparison:
3752 return build_int_cst (type, max_p ? 1 : 0);
3753
3754 case tcc_unary:
3755 op0 = TREE_OPERAND (exp, 0);
3756
3757 if (code == NON_LVALUE_EXPR)
3758 return max_size (op0, max_p);
3759
3760 if (VOID_TYPE_P (TREE_TYPE (op0)))
3761 return max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type);
3762
3763 op0 = max_size (op0, code == NEGATE_EXPR ? !max_p : max_p);
3764
3765 if (op0 == TREE_OPERAND (exp, 0))
3766 return exp;
3767
3768 return fold_build1 (code, type, op0);
3769
3770 case tcc_binary:
3771 op0 = TREE_OPERAND (exp, 0);
3772 op1 = TREE_OPERAND (exp, 1);
3773
3774 /* If we have a multiply-add with a "negative" value in an unsigned
3775 type, do a multiply-subtract with the negated value, in order to
3776 avoid creating a spurious overflow below. */
3777 if (code == PLUS_EXPR
3778 && TREE_CODE (op0) == MULT_EXPR
3779 && TYPE_UNSIGNED (type)
3780 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
3781 && !TREE_OVERFLOW (TREE_OPERAND (op0, 1))
3782 && tree_int_cst_sign_bit (TREE_OPERAND (op0, 1)))
3783 {
3784 tree tmp = op1;
3785 op1 = build2 (MULT_EXPR, type, TREE_OPERAND (op0, 0),
3786 fold_build1 (NEGATE_EXPR, type,
3787 TREE_OPERAND (op0, 1)));
3788 op0 = tmp;
3789 code = MINUS_EXPR;
3790 }
3791
3792 op0 = max_size (op0, max_p);
3793 op1 = max_size (op1, code == MINUS_EXPR ? !max_p : max_p);
3794
3795 if ((code == MINUS_EXPR || code == PLUS_EXPR))
3796 {
3797 /* If the op0 has overflowed and the op1 is a variable,
3798 propagate the overflow by returning the op0. */
3799 if (TREE_CODE (op0) == INTEGER_CST
3800 && TREE_OVERFLOW (op0)
3801 && TREE_CODE (op1) != INTEGER_CST)
3802 return op0;
3803
3804 /* If we have a "negative" value in an unsigned type, do the
3805 opposite operation on the negated value, in order to avoid
3806 creating a spurious overflow below. */
3807 if (TYPE_UNSIGNED (type)
3808 && TREE_CODE (op1) == INTEGER_CST
3809 && !TREE_OVERFLOW (op1)
3810 && tree_int_cst_sign_bit (op1))
3811 {
3812 op1 = fold_build1 (NEGATE_EXPR, type, op1);
3813 code = (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR);
3814 }
3815 }
3816
3817 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3818 return exp;
3819
3820 /* We need to detect overflows so we call size_binop here. */
3821 return size_binop (code, op0, op1);
3822
3823 case tcc_expression:
3824 switch (TREE_CODE_LENGTH (code))
3825 {
3826 case 1:
3827 if (code == SAVE_EXPR)
3828 return exp;
3829
3830 op0 = max_size (TREE_OPERAND (exp, 0),
3831 code == TRUTH_NOT_EXPR ? !max_p : max_p);
3832
3833 if (op0 == TREE_OPERAND (exp, 0))
3834 return exp;
3835
3836 return fold_build1 (code, type, op0);
3837
3838 case 2:
3839 if (code == COMPOUND_EXPR)
3840 return max_size (TREE_OPERAND (exp, 1), max_p);
3841
3842 op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3843 op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3844
3845 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3846 return exp;
3847
3848 return fold_build2 (code, type, op0, op1);
3849
3850 case 3:
3851 if (code == COND_EXPR)
3852 {
3853 op0 = TREE_OPERAND (exp, 0);
3854 op1 = TREE_OPERAND (exp, 1);
3855 op2 = TREE_OPERAND (exp, 2);
3856
3857 if (!op1 || !op2)
3858 return exp;
3859
3860 op1 = max_size (op1, max_p);
3861 op2 = max_size (op2, max_p);
3862
3863 /* If we have the MAX of a "negative" value in an unsigned type
3864 and zero for a length expression, just return zero. */
3865 if (max_p
3866 && TREE_CODE (op0) == LE_EXPR
3867 && TYPE_UNSIGNED (type)
3868 && TREE_CODE (op1) == INTEGER_CST
3869 && !TREE_OVERFLOW (op1)
3870 && tree_int_cst_sign_bit (op1)
3871 && integer_zerop (op2))
3872 return op2;
3873
3874 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type, op1, op2);
3875 }
3876 break;
3877
3878 default:
3879 break;
3880 }
3881
3882 /* Other tree classes cannot happen. */
3883 default:
3884 break;
3885 }
3886
3887 gcc_unreachable ();
3888 }
3889 \f
3890 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3891 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3892 Return a constructor for the template. */
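/* For instance (illustrative case): for a one-dimensional array with
   bounds 1 .. N, the returned constructor holds the values 1 and N for
   the two fields of TEMPLATE_TYPE, with any PLACEHOLDER_EXPR occurring
   in N substituted from EXPR. */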
3893
3894 tree
3895 build_template (tree template_type, tree array_type, tree expr)
3896 {
3897 vec<constructor_elt, va_gc> *template_elts = NULL;
3898 tree bound_list = NULL_TREE;
3899 tree field;
3900
3901 while (TREE_CODE (array_type) == RECORD_TYPE
3902 && (TYPE_PADDING_P (array_type)
3903 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3904 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3905
3906 if (TREE_CODE (array_type) == ARRAY_TYPE
3907 || (TREE_CODE (array_type) == INTEGER_TYPE
3908 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3909 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3910
3911 /* First make the list for a CONSTRUCTOR for the template. Go down the
3912 field list of the template instead of the type chain because this
3913 array might be an Ada array of arrays and we can't tell where the
3914 nested arrays stop being the underlying object. */
3915
3916 for (field = TYPE_FIELDS (template_type); field;
3917 (bound_list
3918 ? (bound_list = TREE_CHAIN (bound_list))
3919 : (array_type = TREE_TYPE (array_type))),
3920 field = DECL_CHAIN (DECL_CHAIN (field)))
3921 {
3922 tree bounds, min, max;
3923
3924 /* If we have a bound list, get the bounds from there. Likewise
3925 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3926 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3927 This will give us a maximum range. */
3928 if (bound_list)
3929 bounds = TREE_VALUE (bound_list);
3930 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3931 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3932 else if (expr && TREE_CODE (expr) == PARM_DECL
3933 && DECL_BY_COMPONENT_PTR_P (expr))
3934 bounds = TREE_TYPE (field);
3935 else
3936 gcc_unreachable ();
3937
3938 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3939 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3940
3941 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3942 substitute it from OBJECT. */
3943 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3944 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3945
3946 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3947 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3948 }
3949
3950 return gnat_build_constructor (template_type, template_elts);
3951 }
3952 \f
3953 /* Return true if TYPE is suitable for the element type of a vector. */
3954
3955 static bool
3956 type_for_vector_element_p (tree type)
3957 {
3958 machine_mode mode;
3959
3960 if (!INTEGRAL_TYPE_P (type)
3961 && !SCALAR_FLOAT_TYPE_P (type)
3962 && !FIXED_POINT_TYPE_P (type))
3963 return false;
3964
3965 mode = TYPE_MODE (type);
3966 if (GET_MODE_CLASS (mode) != MODE_INT
3967 && !SCALAR_FLOAT_MODE_P (mode)
3968 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3969 return false;
3970
3971 return true;
3972 }
3973
3974 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3975 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3976 attribute declaration and want to issue error messages on failure. */
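/* For instance (illustrative figures): with a 4-byte INNER_TYPE and a
   SIZE of 16 bytes, the result would be a vector of 4 components; a
   SIZE of 12 bytes would yield 3 components, which is not a power of
   two, hence NULL_TREE and an error if ATTRIBUTE is set. */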
3977
3978 static tree
3979 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3980 {
3981 unsigned HOST_WIDE_INT size_int, inner_size_int;
3982 int nunits;
3983
3984 /* Silently punt on variable sizes. We can't make vector types for them,
3985 we need to ignore them on front-end generated subtypes of unconstrained
3986 base types, and, since this attribute is for binding implementors rather
3987 than end users, we should never get here from legitimate explicit uses. */
3988 if (!tree_fits_uhwi_p (size))
3989 return NULL_TREE;
3990 size_int = tree_to_uhwi (size);
3991
3992 if (!type_for_vector_element_p (inner_type))
3993 {
3994 if (attribute)
3995 error ("invalid element type for attribute %qs",
3996 IDENTIFIER_POINTER (attribute));
3997 return NULL_TREE;
3998 }
3999 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
4000
4001 if (size_int % inner_size_int)
4002 {
4003 if (attribute)
4004 error ("vector size not an integral multiple of component size");
4005 return NULL_TREE;
4006 }
4007
4008 if (size_int == 0)
4009 {
4010 if (attribute)
4011 error ("zero vector size");
4012 return NULL_TREE;
4013 }
4014
4015 nunits = size_int / inner_size_int;
4016 if (nunits & (nunits - 1))
4017 {
4018 if (attribute)
4019 error ("number of components of vector not a power of two");
4020 return NULL_TREE;
4021 }
4022
4023 return build_vector_type (inner_type, nunits);
4024 }
4025
4026 /* Return a vector type whose representative array type is ARRAY_TYPE, or
4027 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
4028 processing the attribute and want to issue error messages on failure. */
4029
4030 static tree
4031 build_vector_type_for_array (tree array_type, tree attribute)
4032 {
4033 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
4034 TYPE_SIZE_UNIT (array_type),
4035 attribute);
4036 if (!vector_type)
4037 return NULL_TREE;
4038
4039 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
4040 return vector_type;
4041 }
4042 \f
4043 /* Build a type to be used to represent an aliased object whose nominal type
4044 is an unconstrained array. This consists of a RECORD_TYPE containing a
4045 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4046 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4047 an arbitrary unconstrained object. Use NAME as the name of the record.
4048 DEBUG_INFO_P is true if we need to write debug information for the type. */
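/* For instance (illustrative layout): for a value of type String, the
   result would be a record whose BOUNDS field holds the lower and upper
   bounds and whose ARRAY field holds the characters, in this order. */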
4049
4050 tree
4051 build_unc_object_type (tree template_type, tree object_type, tree name,
4052 bool debug_info_p)
4053 {
4054 tree decl;
4055 tree type = make_node (RECORD_TYPE);
4056 tree template_field
4057 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4058 NULL_TREE, NULL_TREE, 0, 1);
4059 tree array_field
4060 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4061 NULL_TREE, NULL_TREE, 0, 1);
4062
4063 TYPE_NAME (type) = name;
4064 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4065 DECL_CHAIN (template_field) = array_field;
4066 finish_record_type (type, template_field, 0, true);
4067
4068 /* Declare it now since it will never be declared otherwise. This is
4069 necessary to ensure that its subtrees are properly marked. */
4070 decl = create_type_decl (name, type, true, debug_info_p, Empty);
4071
4072 /* template_type will not be used anywhere else, so to keep the debug
4073 info clean and in order to avoid scoping issues, make decl its
4074 context. */
4075 gnat_set_type_context (template_type, decl);
4076
4077 return type;
4078 }
4079
4080 /* Same, taking a thin or fat pointer type instead of a template type. */
4081
4082 tree
4083 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4084 tree name, bool debug_info_p)
4085 {
4086 tree template_type;
4087
4088 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4089
4090 template_type
4091 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4092 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4093 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4094
4095 return
4096 build_unc_object_type (template_type, object_type, name, debug_info_p);
4097 }
4098 \f
4099 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4100 In the normal case this is just two adjustments, but we have more to
4101 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4102
4103 void
4104 update_pointer_to (tree old_type, tree new_type)
4105 {
4106 tree ptr = TYPE_POINTER_TO (old_type);
4107 tree ref = TYPE_REFERENCE_TO (old_type);
4108 tree t;
4109
4110 /* If this is the main variant, process all the other variants first. */
4111 if (TYPE_MAIN_VARIANT (old_type) == old_type)
4112 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4113 update_pointer_to (t, new_type);
4114
4115 /* If no pointers and no references, we are done. */
4116 if (!ptr && !ref)
4117 return;
4118
4119 /* Merge the old type qualifiers in the new type.
4120
4121 Each old variant has qualifiers for specific reasons, and the new
4122 designated type as well. Each set of qualifiers represents useful
4123 information grabbed at some point, and merging the two simply unifies
4124 these inputs into the final type description.
4125
4126 Consider for instance a volatile type frozen after an access to constant
4127 type designating it; after the designated type's freeze, we get here with
4128 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4129 when the access type was processed. We will make a volatile and readonly
4130 designated type, because that's what it really is.
4131
4132 We might also get here for a non-dummy OLD_TYPE variant with different
4133 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4134 to private record type elaboration (see the comments around the call to
4135 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4136 the qualifiers in those cases too, to avoid accidentally discarding the
4137 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4138 new_type
4139 = build_qualified_type (new_type,
4140 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4141
4142 /* If old type and new type are identical, there is nothing to do. */
4143 if (old_type == new_type)
4144 return;
4145
4146 /* Otherwise, first handle the simple case. */
4147 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4148 {
4149 tree new_ptr, new_ref;
4150
4151 /* If pointer or reference already points to new type, nothing to do.
4152 This can happen as update_pointer_to can be invoked multiple times
4153 on the same couple of types because of the type variants. */
4154 if ((ptr && TREE_TYPE (ptr) == new_type)
4155 || (ref && TREE_TYPE (ref) == new_type))
4156 return;
4157
4158 /* Chain PTR and its variants at the end. */
4159 new_ptr = TYPE_POINTER_TO (new_type);
4160 if (new_ptr)
4161 {
4162 while (TYPE_NEXT_PTR_TO (new_ptr))
4163 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4164 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4165 }
4166 else
4167 TYPE_POINTER_TO (new_type) = ptr;
4168
4169 /* Now adjust them. */
4170 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4171 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4172 {
4173 TREE_TYPE (t) = new_type;
4174 if (TYPE_NULL_BOUNDS (t))
4175 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4176 }
4177
4178 /* Chain REF and its variants at the end. */
4179 new_ref = TYPE_REFERENCE_TO (new_type);
4180 if (new_ref)
4181 {
4182 while (TYPE_NEXT_REF_TO (new_ref))
4183 new_ref = TYPE_NEXT_REF_TO (new_ref);
4184 TYPE_NEXT_REF_TO (new_ref) = ref;
4185 }
4186 else
4187 TYPE_REFERENCE_TO (new_type) = ref;
4188
4189 /* Now adjust them. */
4190 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4191 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4192 TREE_TYPE (t) = new_type;
4193
4194 TYPE_POINTER_TO (old_type) = NULL_TREE;
4195 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4196 }
4197
4198 /* Now deal with the unconstrained array case. In this case the pointer
4199 is actually a record where both fields are pointers to dummy nodes.
4200 Turn them into pointers to the correct types using update_pointer_to.
4201 Likewise for the pointer to the object record (thin pointer). */
4202 else
4203 {
4204 tree new_ptr = TYPE_POINTER_TO (new_type);
4205
4206 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4207
4208 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4209 since update_pointer_to can be invoked multiple times on the same
4210 couple of types because of the type variants. */
4211 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4212 return;
4213
4214 update_pointer_to
4215 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4216 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4217
4218 update_pointer_to
4219 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4220 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4221
4222 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4223 TYPE_OBJECT_RECORD_TYPE (new_type));
4224
4225 TYPE_POINTER_TO (old_type) = NULL_TREE;
4226 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4227 }
4228 }
4229 \f
4230 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4231 unconstrained one. This involves making or finding a template. */
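/* A fat pointer is a record with two pointer fields: the first points
   to the array data and the second to the bounds template; the code
   below fills in both, building a new template from the constrained
   type when the input does not already carry one. */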
4232
4233 static tree
4234 convert_to_fat_pointer (tree type, tree expr)
4235 {
4236 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4237 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4238 tree etype = TREE_TYPE (expr);
4239 tree template_addr;
4240 vec<constructor_elt, va_gc> *v;
4241 vec_alloc (v, 2);
4242
4243 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4244 array (compare_fat_pointers ensures that this is the full discriminant)
4245 and a valid pointer to the bounds. This latter property is necessary
4246 since the compiler can hoist the load of the bounds done through it. */
4247 if (integer_zerop (expr))
4248 {
4249 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4250 tree null_bounds, t;
4251
4252 if (TYPE_NULL_BOUNDS (ptr_template_type))
4253 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4254 else
4255 {
4256 /* The template type can still be dummy at this point so we build an
4257 empty constructor. The middle-end will fill it in with zeros. */
4258 t = build_constructor (template_type, NULL);
4259 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4260 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4261 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4262 }
4263
4264 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4265 fold_convert (p_array_type, null_pointer_node));
4266 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4267 t = build_constructor (type, v);
4268 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4269 TREE_CONSTANT (t) = 0;
4270 TREE_STATIC (t) = 1;
4271
4272 return t;
4273 }
4274
4275 /* If EXPR is a thin pointer, make template and data from the record. */
4276 if (TYPE_IS_THIN_POINTER_P (etype))
4277 {
4278 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4279
4280 expr = gnat_protect_expr (expr);
4281
4282 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4283 the thin pointer value has been shifted so we shift it back to get
4284 the template address. */
4285 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4286 {
4287 template_addr
4288 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4289 fold_build1 (NEGATE_EXPR, sizetype,
4290 byte_position
4291 (DECL_CHAIN (field))));
4292 template_addr
4293 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4294 template_addr);
4295 }
4296
4297 /* Otherwise we explicitly take the address of the fields. */
4298 else
4299 {
4300 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4301 template_addr
4302 = build_unary_op (ADDR_EXPR, NULL_TREE,
4303 build_component_ref (expr, field, false));
4304 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4305 build_component_ref (expr, DECL_CHAIN (field),
4306 false));
4307 }
4308 }
4309
4310 /* Otherwise, build the constructor for the template. */
4311 else
4312 template_addr
4313 = build_unary_op (ADDR_EXPR, NULL_TREE,
4314 build_template (template_type, TREE_TYPE (etype),
4315 expr));
4316
4317 /* The final result is a constructor for the fat pointer.
4318
4319 If EXPR is an argument of a foreign convention subprogram, the type it
4320 points to is directly the component type. In this case, the expression
4321 type may not match the corresponding FIELD_DECL type at this point, so we
4322 call "convert" here to fix that up if necessary. This type consistency is
4323 required, for instance because it ensures that possible later folding of
4324 COMPONENT_REFs against this constructor always yields something of the
4325 same type as the initial reference.
4326
4327 Note that the call to "build_template" above is still fine because it
4328 will only refer to the provided TEMPLATE_TYPE in this case. */
4329 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4330 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4331 return gnat_build_constructor (type, v);
4332 }
4333 \f
4334 /* Create an expression whose value is that of EXPR,
4335 converted to type TYPE. The TREE_TYPE of the value
4336 is always TYPE. This function implements all reasonable
4337 conversions; callers should filter out those that are
4338 not permitted by the language being compiled. */
4339
4340 tree
4341 convert (tree type, tree expr)
4342 {
4343 tree etype = TREE_TYPE (expr);
4344 enum tree_code ecode = TREE_CODE (etype);
4345 enum tree_code code = TREE_CODE (type);
4346
4347 /* If the expression is already of the right type, we are done. */
4348 if (etype == type)
4349 return expr;
4350
4351 /* If both input and output have padding and are of variable size, do this
4352 as an unchecked conversion. Likewise if one is a mere variant of the
4353 other, so we avoid a pointless unpad/repad sequence. */
4354 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4355 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4356 && (!TREE_CONSTANT (TYPE_SIZE (type))
4357 || !TREE_CONSTANT (TYPE_SIZE (etype))
4358 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4359 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4360 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4361 ;
4362
4363 /* If the output type has padding, convert to the inner type and make a
4364 constructor to build the record, unless a variable size is involved. */
4365 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4366 {
4367 /* If we previously converted from another type and our type is
4368 of variable size, remove the conversion to avoid the need for
4369 variable-sized temporaries. Likewise for a conversion between
4370 original and packable version. */
4371 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4372 && (!TREE_CONSTANT (TYPE_SIZE (type))
4373 || (ecode == RECORD_TYPE
4374 && TYPE_NAME (etype)
4375 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4376 expr = TREE_OPERAND (expr, 0);
4377
4378 /* If we are just removing the padding from expr, convert the original
4379 object if we have variable size in order to avoid the need for some
4380 variable-sized temporaries. Likewise if the padding is a variant
4381 of the other, so we avoid a pointless unpad/repad sequence. */
4382 if (TREE_CODE (expr) == COMPONENT_REF
4383 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4384 && (!TREE_CONSTANT (TYPE_SIZE (type))
4385 || TYPE_MAIN_VARIANT (type)
4386 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4387 || (ecode == RECORD_TYPE
4388 && TYPE_NAME (etype)
4389 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4390 return convert (type, TREE_OPERAND (expr, 0));
4391
4392 /* If the inner type is of self-referential size and the expression type
4393 is a record, do this as an unchecked conversion unless both types are
4394 essentially the same. */
4395 if (ecode == RECORD_TYPE
4396 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4397 && TYPE_MAIN_VARIANT (etype)
4398 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4399 return unchecked_convert (type, expr, false);
4400
4401 /* If we are converting between array types with variable size, do the
4402 final conversion as an unchecked conversion, again to avoid the need
4403 for some variable-sized temporaries. If valid, this conversion is
4404 very likely purely technical and without real effects. */
4405 if (ecode == ARRAY_TYPE
4406 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4407 && !TREE_CONSTANT (TYPE_SIZE (etype))
4408 && !TREE_CONSTANT (TYPE_SIZE (type)))
4409 return unchecked_convert (type,
4410 convert (TREE_TYPE (TYPE_FIELDS (type)),
4411 expr),
4412 false);
4413
4414 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4415
4416 /* If converting to the inner type has already created a CONSTRUCTOR with
4417 the right size, then reuse it instead of creating another one. This
4418 can happen for the padding type built to overalign local variables. */
4419 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4420 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4421 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4422 && tree_int_cst_equal (TYPE_SIZE (type),
4423 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4424 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4425
4426 vec<constructor_elt, va_gc> *v;
4427 vec_alloc (v, 1);
4428 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4429 return gnat_build_constructor (type, v);
4430 }
4431
4432 /* If the input type has padding, remove it and convert to the output type.
4433 The conditions ordering is arranged to ensure that the output type is not
4434 a padding type here, as it is not clear whether the conversion would
4435 always be correct if this were to happen. */
4436 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4437 {
4438 tree unpadded;
4439
4440 /* If we have just converted to this padded type, just get the
4441 inner expression. */
4442 if (TREE_CODE (expr) == CONSTRUCTOR)
4443 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4444
4445 /* Otherwise, build an explicit component reference. */
4446 else
4447 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4448
4449 return convert (type, unpadded);
4450 }
4451
4452 /* If the input is a biased type, convert first to the base type and add
4453 the bias. Note that the bias must go through a full conversion to the
4454 base type, lest it is itself a biased value; this happens for subtypes
4455 of biased types. */
4456 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4457 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4458 fold_convert (TREE_TYPE (etype), expr),
4459 convert (TREE_TYPE (etype),
4460 TYPE_MIN_VALUE (etype))));
4461
4462 /* If the input is a justified modular type, we need to extract the actual
4463 object before converting it to another type, with the exception of an
4464 [unconstrained] array or a mere type variant. It is useful to avoid
4465 the extraction and conversion in these cases because it could end up
4466 replacing a VAR_DECL by a constructor and we might be about to take
4467 the address of the result. */
4468 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4469 && code != ARRAY_TYPE
4470 && code != UNCONSTRAINED_ARRAY_TYPE
4471 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4472 return
4473 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4474
4475 /* If converting to a type that contains a template, convert to the data
4476 type and then build the template. */
4477 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4478 {
4479 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4480 vec<constructor_elt, va_gc> *v;
4481 vec_alloc (v, 2);
4482
4483 /* If the source already has a template, get a reference to the
4484 associated array only, as we are going to rebuild a template
4485 for the target type anyway. */
4486 expr = maybe_unconstrained_array (expr);
4487
4488 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4489 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4490 obj_type, NULL_TREE));
4491 if (expr)
4492 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4493 convert (obj_type, expr));
4494 return gnat_build_constructor (type, v);
4495 }
4496
4497 /* There are some cases of expressions that we process specially. */
4498 switch (TREE_CODE (expr))
4499 {
4500 case ERROR_MARK:
4501 return expr;
4502
4503 case NULL_EXPR:
4504 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4505 conversion in gnat_expand_expr. NULL_EXPR does not represent
4506 an actual value, so no conversion is needed. */
4507 expr = copy_node (expr);
4508 TREE_TYPE (expr) = type;
4509 return expr;
4510
4511 case STRING_CST:
4512 /* If we are converting a STRING_CST to another constrained array type,
4513 just make a new one in the proper type. */
4514 if (code == ecode
4515 && !(TREE_CONSTANT (TYPE_SIZE (etype))
4516 && !TREE_CONSTANT (TYPE_SIZE (type))))
4517 {
4518 expr = copy_node (expr);
4519 TREE_TYPE (expr) = type;
4520 return expr;
4521 }
4522 break;
4523
4524 case VECTOR_CST:
4525 /* If we are converting a VECTOR_CST to a mere type variant, just make
4526 a new one in the proper type. */
4527 if (code == ecode && gnat_types_compatible_p (type, etype))
4528 {
4529 expr = copy_node (expr);
4530 TREE_TYPE (expr) = type;
4531 return expr;
4532 }
4533 break;
4534
4535 case CONSTRUCTOR:
4536 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4537 another padding type around the same type, just make a new one in
4538 the proper type. */
4539 if (code == ecode
4540 && (gnat_types_compatible_p (type, etype)
4541 || (code == RECORD_TYPE
4542 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4543 && TREE_TYPE (TYPE_FIELDS (type))
4544 == TREE_TYPE (TYPE_FIELDS (etype)))))
4545 {
4546 expr = copy_node (expr);
4547 TREE_TYPE (expr) = type;
4548 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4549 return expr;
4550 }
4551
4552 /* Likewise for a conversion between original and packable version, or
4553 conversion between types of the same size and with the same list of
4554 fields, but we have to work harder to preserve type consistency. */
4555 if (code == ecode
4556 && code == RECORD_TYPE
4557 && (TYPE_NAME (type) == TYPE_NAME (etype)
4558 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4560 {
4561 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4562 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4563 vec<constructor_elt, va_gc> *v;
4564 vec_alloc (v, len);
4565 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4566 unsigned HOST_WIDE_INT idx;
4567 tree index, value;
4568
4569 /* Whether we need to clear TREE_CONSTANT et al. on the output
4570 constructor when we convert in place. */
4571 bool clear_constant = false;
4572
4573 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4574 {
4575 /* Skip the missing fields in the CONSTRUCTOR. */
4576 while (efield && field && !SAME_FIELD_P (efield, index))
4577 {
4578 efield = DECL_CHAIN (efield);
4579 field = DECL_CHAIN (field);
4580 }
4581 /* The field must be the same. */
4582 if (!(efield && field && SAME_FIELD_P (efield, field)))
4583 break;
4584 constructor_elt elt
4585 = {field, convert (TREE_TYPE (field), value)};
4586 v->quick_push (elt);
4587
4588 /* If packing has made this field a bitfield and the input
4589 value couldn't be emitted statically any more, we need to
4590 clear TREE_CONSTANT on our output. */
4591 if (!clear_constant
4592 && TREE_CONSTANT (expr)
4593 && !CONSTRUCTOR_BITFIELD_P (efield)
4594 && CONSTRUCTOR_BITFIELD_P (field)
4595 && !initializer_constant_valid_for_bitfield_p (value))
4596 clear_constant = true;
4597
4598 efield = DECL_CHAIN (efield);
4599 field = DECL_CHAIN (field);
4600 }
4601
4602 /* If we have been able to match and convert all the input fields
4603 to their output type, convert in place now. We'll fall back to a
4604 view conversion downstream otherwise. */
4605 if (idx == len)
4606 {
4607 expr = copy_node (expr);
4608 TREE_TYPE (expr) = type;
4609 CONSTRUCTOR_ELTS (expr) = v;
4610 if (clear_constant)
4611 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4612 return expr;
4613 }
4614 }
4615
4616 /* Likewise for a conversion between array type and vector type with a
4617 compatible representative array. */
4618 else if (code == VECTOR_TYPE
4619 && ecode == ARRAY_TYPE
4620 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4621 etype))
4622 {
4623 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4624 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4625 vec<constructor_elt, va_gc> *v;
4626 unsigned HOST_WIDE_INT ix;
4627 tree value;
4628
4629 /* Build a VECTOR_CST from a *constant* array constructor. */
4630 if (TREE_CONSTANT (expr))
4631 {
4632 bool constant_p = true;
4633
4634 /* Iterate through elements and check if all constructor
4635 elements are *_CSTs. */
4636 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4637 if (!CONSTANT_CLASS_P (value))
4638 {
4639 constant_p = false;
4640 break;
4641 }
4642
4643 if (constant_p)
4644 return build_vector_from_ctor (type,
4645 CONSTRUCTOR_ELTS (expr));
4646 }
4647
4648 /* Otherwise, build a regular vector constructor. */
4649 vec_alloc (v, len);
4650 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4651 {
4652 constructor_elt elt = {NULL_TREE, value};
4653 v->quick_push (elt);
4654 }
4655 expr = copy_node (expr);
4656 TREE_TYPE (expr) = type;
4657 CONSTRUCTOR_ELTS (expr) = v;
4658 return expr;
4659 }
4660 break;
4661
4662 case UNCONSTRAINED_ARRAY_REF:
4663 /* First retrieve the underlying array. */
4664 expr = maybe_unconstrained_array (expr);
4665 etype = TREE_TYPE (expr);
4666 ecode = TREE_CODE (etype);
4667 break;
4668
4669 case VIEW_CONVERT_EXPR:
4670 {
4671 /* GCC 4.x is very sensitive to type consistency overall, and view
4672 conversions thus are very frequent. Even though just "convert"ing
4673 the inner operand to the output type is fine in most cases, it
4674 might expose unexpected input/output type mismatches in special
4675 circumstances so we avoid such recursive calls when we can. */
4676 tree op0 = TREE_OPERAND (expr, 0);
4677
4678 /* If we are converting back to the original type, we can just
4679 lift the input conversion. This is a common occurrence with
4680 switches back-and-forth amongst type variants. */
4681 if (type == TREE_TYPE (op0))
4682 return op0;
4683
4684 /* Otherwise, if we're converting between two aggregate or vector
4685 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4686 target type in place or to just convert the inner expression. */
4687 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4688 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4689 {
4690 /* If we are converting between mere variants, we can just
4691 substitute the VIEW_CONVERT_EXPR in place. */
4692 if (gnat_types_compatible_p (type, etype))
4693 return build1 (VIEW_CONVERT_EXPR, type, op0);
4694
4695 /* Otherwise, we may just bypass the input view conversion unless
4696 one of the types is a fat pointer, which is handled by
4697 specialized code below which relies on exact type matching. */
4698 else if (!TYPE_IS_FAT_POINTER_P (type)
4699 && !TYPE_IS_FAT_POINTER_P (etype))
4700 return convert (type, op0);
4701 }
4702
4703 break;
4704 }
4705
4706 default:
4707 break;
4708 }
4709
4710 /* Check for converting to a pointer to an unconstrained array. */
4711 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4712 return convert_to_fat_pointer (type, expr);
4713
4714 /* If we are converting between two aggregate or vector types that are mere
4715 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4716 to a vector type from its representative array type. */
4717 else if ((code == ecode
4718 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4719 && gnat_types_compatible_p (type, etype))
4720 || (code == VECTOR_TYPE
4721 && ecode == ARRAY_TYPE
4722 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4723 etype)))
4724 return build1 (VIEW_CONVERT_EXPR, type, expr);
4725
4726 /* If we are converting between tagged types, try to upcast properly.
4727 But don't do it if we are just annotating types since tagged types
4728 aren't fully laid out in this mode. */
4729 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4730 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
4731 && !type_annotate_only)
4732 {
4733 tree child_etype = etype;
4734 do {
4735 tree field = TYPE_FIELDS (child_etype);
4736 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4737 return build_component_ref (expr, field, false);
4738 child_etype = TREE_TYPE (field);
4739 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4740 }
4741
4742 /* If we are converting from a smaller form of record type back to it, just
4743 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4744 size on both sides. */
4745 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4746 && smaller_form_type_p (etype, type))
4747 {
4748 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4749 false, false, false, true),
4750 expr);
4751 return build1 (VIEW_CONVERT_EXPR, type, expr);
4752 }
4753
4754 /* In all other cases of related types, make a NOP_EXPR. */
4755 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4756 return fold_convert (type, expr);
4757
4758 switch (code)
4759 {
4760 case VOID_TYPE:
4761 return fold_build1 (CONVERT_EXPR, type, expr);
4762
4763 case INTEGER_TYPE:
4764 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4765 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4766 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4767 return unchecked_convert (type, expr, false);
4768
4769 /* If the output is a biased type, convert first to the base type and
4770 subtract the bias. Note that the bias itself must go through a full
4771 conversion to the base type, since it may itself be a biased value; this happens
4772 for subtypes of biased types. */
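
/* For instance (an illustrative sketch), for a type T declared as
   range 100 .. 103 with a size clause of 2 bits, the representation
   of a value V is V - 100, so converting to T amounts to converting
   to the base type and then subtracting T'First, i.e. 100. */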
4773 if (TYPE_BIASED_REPRESENTATION_P (type))
4774 return fold_convert (type,
4775 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4776 convert (TREE_TYPE (type), expr),
4777 convert (TREE_TYPE (type),
4778 TYPE_MIN_VALUE (type))));
4779
4780 /* ... fall through ... */
4781
4782 case ENUMERAL_TYPE:
4783 case BOOLEAN_TYPE:
4784 /* If we are converting an additive expression to an integer type
4785 with lower precision, be wary of the optimization that can be
4786 applied by convert_to_integer. There are 2 problematic cases:
4787 - if the first operand was originally of a biased type,
4788 because we could be recursively called to convert it
4789 to an intermediate type and thus rematerialize the
4790 additive operator endlessly,
4791 - if the expression contains a placeholder, because an
4792 intermediate conversion that changes the sign could
4793 be inserted and thus introduce an artificial overflow
4794 at compile time when the placeholder is substituted. */
4795 if (code == INTEGER_TYPE
4796 && ecode == INTEGER_TYPE
4797 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4798 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4799 {
4800 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4801
4802 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4803 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4804 || CONTAINS_PLACEHOLDER_P (expr))
4805 return build1 (NOP_EXPR, type, expr);
4806 }
4807
4808 return fold (convert_to_integer (type, expr));
4809
4810 case POINTER_TYPE:
4811 case REFERENCE_TYPE:
4812 /* If converting between two thin pointers, adjust if needed to account
4813 for differing offsets from the base pointer, depending on whether
4814 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
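
/* A sketch of the layout at play: the record designated by a thin
   pointer contains a bounds template followed by the array data, and
   the pointer designates either the start of the record or, when
   TYPE_UNCONSTRAINED_ARRAY is attached, the data part; the difference
   between the two byte positions must therefore be added when
   switching from one convention to the other. */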
4815 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4816 {
4817 tree etype_pos
4818 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4819 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4820 : size_zero_node;
4821 tree type_pos
4822 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4823 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4824 : size_zero_node;
4825 tree byte_diff = size_diffop (type_pos, etype_pos);
4826
4827 expr = build1 (NOP_EXPR, type, expr);
4828 if (integer_zerop (byte_diff))
4829 return expr;
4830
4831 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4832 fold_convert (sizetype, byte_diff));
4833 }
4834
4835 /* If converting fat pointer to normal or thin pointer, get the pointer
4836 to the array and then convert it. */
4837 if (TYPE_IS_FAT_POINTER_P (etype))
4838 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4839
4840 return fold (convert_to_pointer (type, expr));
4841
4842 case REAL_TYPE:
4843 return fold (convert_to_real (type, expr));
4844
4845 case RECORD_TYPE:
4846 /* Do a normal conversion between scalar and justified modular type. */
4847 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4848 {
4849 vec<constructor_elt, va_gc> *v;
4850 vec_alloc (v, 1);
4851
4852 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4853 convert (TREE_TYPE (TYPE_FIELDS (type)),
4854 expr));
4855 return gnat_build_constructor (type, v);
4856 }
4857
4858 /* In these cases, assume the front-end has validated the conversion.
4859 If the conversion is valid, it will be a bit-wise conversion, so
4860 it can be viewed as an unchecked conversion. */
4861 return unchecked_convert (type, expr, false);
4862
4863 case ARRAY_TYPE:
4864 /* Do a normal conversion between unconstrained and constrained array
4865 type, assuming the latter is a constrained version of the former. */
4866 if (TREE_CODE (expr) == INDIRECT_REF
4867 && ecode == ARRAY_TYPE
4868 && TREE_TYPE (etype) == TREE_TYPE (type))
4869 {
4870 tree ptr_type = build_pointer_type (type);
4871 tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
4872 fold_convert (ptr_type,
4873 TREE_OPERAND (expr, 0)));
4874 TREE_READONLY (t) = TREE_READONLY (expr);
4875 TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
4876 return t;
4877 }
4878
4879 /* In these cases, assume the front-end has validated the conversion.
4880 If the conversion is valid, it will be a bit-wise conversion, so
4881 it can be viewed as an unchecked conversion. */
4882 return unchecked_convert (type, expr, false);
4883
4884 case UNION_TYPE:
4885 /* This is either a conversion between a tagged type and some
4886 subtype, which we have to mark as a UNION_TYPE because of
4887 overlapping fields, or a conversion of an Unchecked_Union. */
4888 return unchecked_convert (type, expr, false);
4889
4890 case UNCONSTRAINED_ARRAY_TYPE:
4891 /* If the input is a VECTOR_TYPE, convert to the representative
4892 array type first. */
4893 if (ecode == VECTOR_TYPE)
4894 {
4895 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4896 etype = TREE_TYPE (expr);
4897 ecode = TREE_CODE (etype);
4898 }
4899
4900 /* If EXPR is a constrained array, take its address, convert it to a
4901 fat pointer, and then dereference it. Likewise if EXPR is a
4902 record containing both a template and a constrained array.
4903 Note that a record representing a justified modular type
4904 always represents a packed constrained array. */
4905 if (ecode == ARRAY_TYPE
4906 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4907 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4908 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4909 return
4910 build_unary_op
4911 (INDIRECT_REF, NULL_TREE,
4912 convert_to_fat_pointer (TREE_TYPE (type),
4913 build_unary_op (ADDR_EXPR,
4914 NULL_TREE, expr)));
4915
4916 /* Do something very similar for converting one unconstrained
4917 array to another. */
4918 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4919 return
4920 build_unary_op (INDIRECT_REF, NULL_TREE,
4921 convert (TREE_TYPE (type),
4922 build_unary_op (ADDR_EXPR,
4923 NULL_TREE, expr)));
4924 else
4925 gcc_unreachable ();
4926
4927 case COMPLEX_TYPE:
4928 return fold (convert_to_complex (type, expr));
4929
4930 default:
4931 gcc_unreachable ();
4932 }
4933 }
4934
4935 /* Create an expression whose value is that of EXPR converted to the common
4936 index type, which is sizetype. EXPR is supposed to be in the base type
4937 of the GNAT index type. Calling it is equivalent to doing
4938
4939 convert (sizetype, expr)
4940
4941 but we try to distribute the type conversion with the knowledge that EXPR
4942 cannot overflow in its type. This is a best-effort approach and we fall
4943 back to the above expression as soon as difficulties are encountered.
4944
4945 This is necessary to overcome issues that arise when the GNAT base index
4946 type and the GCC common index type (sizetype) don't have the same size,
4947 which is quite frequent on 64-bit architectures. In this case, and if
4948 the GNAT base index type is signed but the iteration type of the loop has
4949 been forced to unsigned, the loop scalar evolution engine cannot compute
4950 a simple evolution for the general induction variables associated with the
4951 array indices, because it will preserve the wrap-around semantics in the
4952 unsigned type of their "inner" part. As a result, many loop optimizations
4953 are blocked.
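
   For instance (an illustrative sketch, not actual compiler output), in

     for J in A'First .. A'Last loop
       A (J) := 0;
     end loop;

   the computation of the offset for A (J) would otherwise embed a
   truncation of the induction variable to the GNAT base index type,
   hiding the simple linear evolution of the offset from the optimizers.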
4954
4955 The solution is to use a special (basic) induction variable that is at
4956 least as large as sizetype, and to express the aforementioned general
4957 induction variables in terms of this induction variable, eliminating
4958 the problematic intermediate truncation to the GNAT base index type.
4959 This is possible as long as the original expression doesn't overflow
4960 and if the middle-end hasn't introduced artificial overflows in the
4961 course of the various simplifications it can make to the expression. */
4962
4963 tree
4964 convert_to_index_type (tree expr)
4965 {
4966 enum tree_code code = TREE_CODE (expr);
4967 tree type = TREE_TYPE (expr);
4968
4969 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4970 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4971 if (TYPE_UNSIGNED (type) || !optimize)
4972 return convert (sizetype, expr);
4973
4974 switch (code)
4975 {
4976 case VAR_DECL:
4977 /* The main effect of the function: replace a loop parameter with its
4978 associated special induction variable. */
4979 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4980 expr = DECL_INDUCTION_VAR (expr);
4981 break;
4982
4983 CASE_CONVERT:
4984 {
4985 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4986 /* Bail out as soon as we suspect some sort of type frobbing. */
4987 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4988 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4989 break;
4990 }
4991
4992 /* ... fall through ... */
4993
4994 case NON_LVALUE_EXPR:
4995 return fold_build1 (code, sizetype,
4996 convert_to_index_type (TREE_OPERAND (expr, 0)));
4997
4998 case PLUS_EXPR:
4999 case MINUS_EXPR:
5000 case MULT_EXPR:
5001 return fold_build2 (code, sizetype,
5002 convert_to_index_type (TREE_OPERAND (expr, 0)),
5003 convert_to_index_type (TREE_OPERAND (expr, 1)));
5004
5005 case COMPOUND_EXPR:
5006 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
5007 convert_to_index_type (TREE_OPERAND (expr, 1)));
5008
5009 case COND_EXPR:
5010 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
5011 convert_to_index_type (TREE_OPERAND (expr, 1)),
5012 convert_to_index_type (TREE_OPERAND (expr, 2)));
5013
5014 default:
5015 break;
5016 }
5017
5018 return convert (sizetype, expr);
5019 }
5020 \f
5021 /* Remove all conversions that are done in EXP. This includes converting
5022 from a padded type or to a justified modular type. If TRUE_ADDRESS
5023 is true, always return the address of the containing object even if
5024 the address is not bit-aligned. */
5025
5026 tree
5027 remove_conversions (tree exp, bool true_address)
5028 {
5029 switch (TREE_CODE (exp))
5030 {
5031 case CONSTRUCTOR:
5032 if (true_address
5033 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5034 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5035 return
5036 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
5037 break;
5038
5039 case COMPONENT_REF:
5040 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5041 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5042 break;
5043
5044 CASE_CONVERT:
5045 case VIEW_CONVERT_EXPR:
5046 case NON_LVALUE_EXPR:
5047 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5048
5049 default:
5050 break;
5051 }
5052
5053 return exp;
5054 }
5055 \f
5056 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5057 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5058 likewise return an expression pointing to the underlying array. */
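
/* For instance (a sketch of the underlying representation), given

     type Arr is array (Positive range <>) of Integer;

   a value of the unconstrained type is accessed through a fat pointer
   record with two fields, the first pointing to the array data and the
   second to the bounds template; the UNCONSTRAINED_ARRAY_REF case below
   fetches the first field and dereferences it. */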
5059
5060 tree
5061 maybe_unconstrained_array (tree exp)
5062 {
5063 enum tree_code code = TREE_CODE (exp);
5064 tree type = TREE_TYPE (exp);
5065
5066 switch (TREE_CODE (type))
5067 {
5068 case UNCONSTRAINED_ARRAY_TYPE:
5069 if (code == UNCONSTRAINED_ARRAY_REF)
5070 {
5071 const bool read_only = TREE_READONLY (exp);
5072 const bool no_trap = TREE_THIS_NOTRAP (exp);
5073
5074 exp = TREE_OPERAND (exp, 0);
5075 type = TREE_TYPE (exp);
5076
5077 if (TREE_CODE (exp) == COND_EXPR)
5078 {
5079 tree op1
5080 = build_unary_op (INDIRECT_REF, NULL_TREE,
5081 build_component_ref (TREE_OPERAND (exp, 1),
5082 TYPE_FIELDS (type),
5083 false));
5084 tree op2
5085 = build_unary_op (INDIRECT_REF, NULL_TREE,
5086 build_component_ref (TREE_OPERAND (exp, 2),
5087 TYPE_FIELDS (type),
5088 false));
5089
5090 exp = build3 (COND_EXPR,
5091 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
5092 TREE_OPERAND (exp, 0), op1, op2);
5093 }
5094 else
5095 {
5096 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5097 build_component_ref (exp,
5098 TYPE_FIELDS (type),
5099 false));
5100 TREE_READONLY (exp) = read_only;
5101 TREE_THIS_NOTRAP (exp) = no_trap;
5102 }
5103 }
5104
5105 else if (code == NULL_EXPR)
5106 exp = build1 (NULL_EXPR,
5107 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5108 TREE_OPERAND (exp, 0));
5109 break;
5110
5111 case RECORD_TYPE:
5112 /* If this is a padded type and it contains a template, convert to the
5113 unpadded type first. */
5114 if (TYPE_PADDING_P (type)
5115 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5116 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5117 {
5118 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5119 code = TREE_CODE (exp);
5120 type = TREE_TYPE (exp);
5121 }
5122
5123 if (TYPE_CONTAINS_TEMPLATE_P (type))
5124 {
5125 /* If the array initializer is a box, return NULL_TREE. */
5126 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
5127 return NULL_TREE;
5128
5129 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
5130 false);
5131 type = TREE_TYPE (exp);
5132
5133 /* If the array type is padded, convert to the unpadded type. */
5134 if (TYPE_IS_PADDING_P (type))
5135 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5136 }
5137 break;
5138
5139 default:
5140 break;
5141 }
5142
5143 return exp;
5144 }
5145 \f
5146 /* Return true if EXPR is an expression that can be folded as an operand
5147 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5148
5149 static bool
5150 can_fold_for_view_convert_p (tree expr)
5151 {
5152 tree t1, t2;
5153
5154 /* The folder will fold NOP_EXPRs between integral types with the same
5155 precision (in the middle-end's sense). We cannot allow it if the
5156 types don't have the same precision in the Ada sense as well. */
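
/* For instance (a hypothetical illustration), a NOP_EXPR between
   Integer and a subtype of Integer with range 0 .. 100 has the same
   middle-end precision on both sides but RM sizes of 32 and 7 bits
   respectively, so it must not be folded away. */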
5157 if (TREE_CODE (expr) != NOP_EXPR)
5158 return true;
5159
5160 t1 = TREE_TYPE (expr);
5161 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5162
5163 /* Defer to the folder for non-integral conversions. */
5164 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5165 return true;
5166
5167 /* Only fold conversions that preserve both precisions. */
5168 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5169 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5170 return true;
5171
5172 return false;
5173 }
5174
5175 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5176 If NOTRUNC_P is true, truncation operations should be suppressed.
5177
5178 Special care is required with (source or target) integral types whose
5179 precision is not equal to their size, to make sure we fetch or assign
5180 the value bits whose location might depend on the endianness, e.g.
5181
5182 Rmsize : constant := 8;
5183 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5184
5185 type Bit_Array is array (1 .. Rmsize) of Boolean;
5186 pragma Pack (Bit_Array);
5187
5188 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5189
5190 Value : Int := 2#1000_0001#;
5191 Vbits : Bit_Array := To_Bit_Array (Value);
5192
5193 we expect the 8 bits at Vbits'Address to always contain Value, while
5194 their original location depends on the endianness, at Value'Address
5195 on a little-endian architecture but not on a big-endian one.
5196
5197 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5198 the bits between the precision and the size are filled, because of the
5199 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5200 So we use the special predicate type_unsigned_for_rm above. */
5201
5202 tree
5203 unchecked_convert (tree type, tree expr, bool notrunc_p)
5204 {
5205 tree etype = TREE_TYPE (expr);
5206 enum tree_code ecode = TREE_CODE (etype);
5207 enum tree_code code = TREE_CODE (type);
5208 const bool ebiased
5209 = (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype));
5210 const bool biased
5211 = (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type));
5212 const bool ereverse
5213 = (AGGREGATE_TYPE_P (etype) && TYPE_REVERSE_STORAGE_ORDER (etype));
5214 const bool reverse
5215 = (AGGREGATE_TYPE_P (type) && TYPE_REVERSE_STORAGE_ORDER (type));
5216 tree tem;
5217 int c = 0;
5218
5219 /* If the expression is already of the right type, we are done. */
5220 if (etype == type)
5221 return expr;
5222
5223 /* If both types are integral just do a normal conversion.
5224 Likewise for a conversion to an unconstrained array. */
5225 if (((INTEGRAL_TYPE_P (type)
5226 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5227 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5228 && (INTEGRAL_TYPE_P (etype)
5229 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5230 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5231 || code == UNCONSTRAINED_ARRAY_TYPE)
5232 {
5233 if (ebiased)
5234 {
5235 tree ntype = copy_type (etype);
5236 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5237 TYPE_MAIN_VARIANT (ntype) = ntype;
5238 expr = build1 (NOP_EXPR, ntype, expr);
5239 }
5240
5241 if (biased)
5242 {
5243 tree rtype = copy_type (type);
5244 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5245 TYPE_MAIN_VARIANT (rtype) = rtype;
5246 expr = convert (rtype, expr);
5247 expr = build1 (NOP_EXPR, type, expr);
5248 }
5249 else
5250 expr = convert (type, expr);
5251 }
5252
5253 /* If we are converting to an integral type whose precision is not equal
5254 to its size, first unchecked convert to a record type that contains a
5255 field of the given precision. Then extract the result from the field.
5256
5257 There is a subtlety if the source type is an aggregate type with reverse
5258 storage order because its representation is not contiguous in the native
5259 storage order, i.e. a direct unchecked conversion to an integral type
5260 with N bits of precision cannot read the first N bits of the aggregate
5261 type. To overcome it, we do an unchecked conversion to an integral type
5262 with reverse storage order and return the resulting value. This also
5263 ensures that the result of the unchecked conversion doesn't depend on
5264 the endianness of the target machine, but only on the storage order of
5265 the aggregate type.
5266
5267 Finally, for the sake of consistency, we do the unchecked conversion
5268 to an integral type with reverse storage order as soon as the source
5269 type is an aggregate type with reverse storage order, even if there
5270 are no considerations of precision or size involved. Ultimately, we
5271 further extend this processing to any scalar type. */
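
/* For instance (a simplified sketch), converting a 32-bit value to an
   integral type with RM size 24 and size 32 goes through a record type
   with a single 24-bit field OBJ at bit position 0: the unchecked
   conversion to the record type lands the value bits in the right
   place for the storage order, and extracting OBJ then picks them up. */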
5272 else if ((INTEGRAL_TYPE_P (type)
5273 && TYPE_RM_SIZE (type)
5274 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (type),
5275 TYPE_SIZE (type))) < 0
5276 || ereverse))
5277 || (SCALAR_FLOAT_TYPE_P (type) && ereverse))
5278 {
5279 tree rec_type = make_node (RECORD_TYPE);
5280 tree field_type, field;
5281
5282 TYPE_REVERSE_STORAGE_ORDER (rec_type) = ereverse;
5283
5284 if (c < 0)
5285 {
5286 const unsigned HOST_WIDE_INT prec
5287 = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5288 if (type_unsigned_for_rm (type))
5289 field_type = make_unsigned_type (prec);
5290 else
5291 field_type = make_signed_type (prec);
5292 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5293 }
5294 else
5295 field_type = type;
5296
5297 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5298 NULL_TREE, bitsize_zero_node, c < 0, 0);
5299
5300 finish_record_type (rec_type, field, 1, false);
5301
5302 expr = unchecked_convert (rec_type, expr, notrunc_p);
5303 expr = build_component_ref (expr, field, false);
5304 expr = fold_build1 (NOP_EXPR, type, expr);
5305 }
5306
5307 /* Similarly if we are converting from an integral type whose precision is
5308 not equal to its size, first copy into a field of the given precision
5309 and unchecked convert the record type.
5310
5311 The same considerations as above apply if the target type is an aggregate
5312 type with reverse storage order and we also proceed similarly. */
5313 else if ((INTEGRAL_TYPE_P (etype)
5314 && TYPE_RM_SIZE (etype)
5315 && ((c = tree_int_cst_compare (TYPE_RM_SIZE (etype),
5316 TYPE_SIZE (etype))) < 0
5317 || reverse))
5318 || (SCALAR_FLOAT_TYPE_P (etype) && reverse))
5319 {
5320 tree rec_type = make_node (RECORD_TYPE);
5321 vec<constructor_elt, va_gc> *v;
5322 vec_alloc (v, 1);
5323 tree field_type, field;
5324
5325 TYPE_REVERSE_STORAGE_ORDER (rec_type) = reverse;
5326
5327 if (c < 0)
5328 {
5329 const unsigned HOST_WIDE_INT prec
5330 = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5331 if (type_unsigned_for_rm (etype))
5332 field_type = make_unsigned_type (prec);
5333 else
5334 field_type = make_signed_type (prec);
5335 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5336 }
5337 else
5338 field_type = etype;
5339
5340 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5341 NULL_TREE, bitsize_zero_node, c < 0, 0);
5342
5343 finish_record_type (rec_type, field, 1, false);
5344
5345 expr = fold_build1 (NOP_EXPR, field_type, expr);
5346 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5347 expr = gnat_build_constructor (rec_type, v);
5348 expr = unchecked_convert (type, expr, notrunc_p);
5349 }
5350
5351 /* If we are converting from a scalar type to a type with a different size,
5352 we need to pad to have the same size on both sides.
5353
5354 ??? We cannot do it unconditionally because unchecked conversions are
5355 used liberally by the front-end to implement polymorphism, e.g. in:
5356
5357 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5358 return p___size__4 (p__object!(S191s.all));
5359
5360 so we skip all expressions that are references. */
5361 else if (!REFERENCE_CLASS_P (expr)
5362 && !AGGREGATE_TYPE_P (etype)
5363 && TREE_CONSTANT (TYPE_SIZE (type))
5364 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5365 {
5366 if (c < 0)
5367 {
5368 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5369 false, false, false, true),
5370 expr);
5371 expr = unchecked_convert (type, expr, notrunc_p);
5372 }
5373 else
5374 {
5375 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5376 false, false, false, true);
5377 expr = unchecked_convert (rec_type, expr, notrunc_p);
5378 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5379 }
5380 }
5381
5382 /* We have a special case when we are converting between two unconstrained
5383 array types. In that case, take the address, convert the fat pointer
5384 types, and dereference. */
5385 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5386 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5387 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5388 build_unary_op (ADDR_EXPR, NULL_TREE,
5389 expr)));
5390
5391 /* Another special case is when we are converting to a vector type from its
5392 representative array type; this is a regular conversion. */
5393 else if (code == VECTOR_TYPE
5394 && ecode == ARRAY_TYPE
5395 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5396 etype))
5397 expr = convert (type, expr);
5398
5399 /* And, if the array type is not the representative, we try to build an
5400 intermediate vector type of which the array type is the representative
5401 and to do the unchecked conversion between the vector types, in order
5402 to enable further simplifications in the middle-end. */
5403 else if (code == VECTOR_TYPE
5404 && ecode == ARRAY_TYPE
5405 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5406 {
5407 expr = convert (tem, expr);
5408 return unchecked_convert (type, expr, notrunc_p);
5409 }
5410
5411 /* If we are converting a CONSTRUCTOR to a more aligned aggregate type, bump
5412 the alignment of the CONSTRUCTOR to speed up the copy operation. But do
5413 not do it for a conversion between original and packable version to avoid
5414 an infinite recursion. */
5415 else if (TREE_CODE (expr) == CONSTRUCTOR
5416 && AGGREGATE_TYPE_P (type)
5417 && TYPE_NAME (type) != TYPE_NAME (etype)
5418 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5419 {
5420 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5421 Empty, false, false, false, true),
5422 expr);
5423 return unchecked_convert (type, expr, notrunc_p);
5424 }
5425
5426 /* If we are converting a CONSTRUCTOR to a larger aggregate type, bump the
5427 size of the CONSTRUCTOR to make sure there are enough allocated bytes.
5428 But do not do it for a conversion between original and packable version
5429 to avoid an infinite recursion. */
5430 else if (TREE_CODE (expr) == CONSTRUCTOR
5431 && AGGREGATE_TYPE_P (type)
5432 && TYPE_NAME (type) != TYPE_NAME (etype)
5433 && TREE_CONSTANT (TYPE_SIZE (type))
5434 && (!TREE_CONSTANT (TYPE_SIZE (etype))
5435 || tree_int_cst_lt (TYPE_SIZE (etype), TYPE_SIZE (type))))
5436 {
5437 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0,
5438 Empty, false, false, false, true),
5439 expr);
5440 return unchecked_convert (type, expr, notrunc_p);
5441 }
5442
5443 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5444 else
5445 {
5446 expr = maybe_unconstrained_array (expr);
5447 etype = TREE_TYPE (expr);
5448 ecode = TREE_CODE (etype);
5449 if (can_fold_for_view_convert_p (expr))
5450 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5451 else
5452 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5453 }
5454
5455 /* If the result is a non-biased integral type whose precision is not equal
5456 to its size, sign- or zero-extend the result. But we need not do this
5457 if the input is also an integral type and both are unsigned or both are
5458 signed and have the same precision. */
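
/* For instance (a simplified sketch), extending a value whose RM size
   is 5 in a type of size 32 is done as (v << 27) >> 27, with both
   shifts performed in a 32-bit base type whose signedness matches
   type_unsigned_for_rm (type). */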
5459 tree type_rm_size;
5460 if (!notrunc_p
5461 && !biased
5462 && INTEGRAL_TYPE_P (type)
5463 && (type_rm_size = TYPE_RM_SIZE (type))
5464 && tree_int_cst_compare (type_rm_size, TYPE_SIZE (type)) < 0
5465 && !(INTEGRAL_TYPE_P (etype)
5466 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5467 && (type_unsigned_for_rm (type)
5468 || tree_int_cst_compare (type_rm_size,
5469 TYPE_RM_SIZE (etype)
5470 ? TYPE_RM_SIZE (etype)
5471 : TYPE_SIZE (etype)) == 0)))
5472 {
5473 if (integer_zerop (type_rm_size))
5474 expr = build_int_cst (type, 0);
5475 else
5476 {
5477 tree base_type
5478 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5479 type_unsigned_for_rm (type));
5480 tree shift_expr
5481 = convert (base_type,
5482 size_binop (MINUS_EXPR,
5483 TYPE_SIZE (type), type_rm_size));
5484 expr
5485 = convert (type,
5486 build_binary_op (RSHIFT_EXPR, base_type,
5487 build_binary_op (LSHIFT_EXPR, base_type,
5488 convert (base_type,
5489 expr),
5490 shift_expr),
5491 shift_expr));
5492 }
5493 }
5494
5495 /* An unchecked conversion should never raise Constraint_Error. The code
5496 below assumes that GCC's conversion routines overflow the same way that
5497 the underlying hardware does. This is probably true. In the rare case
5498 when it is false, we can rely on the fact that such conversions are
5499 erroneous anyway. */
5500 if (TREE_CODE (expr) == INTEGER_CST)
5501 TREE_OVERFLOW (expr) = 0;
5502
5503 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5504 mark the expression as no longer constant. */
5505 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5506 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5507 OEP_ONLY_CONST))
5508 TREE_CONSTANT (expr) = 0;
5509
5510 return expr;
5511 }
5512 \f
5513 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5514 the latter being a record type as predicated by Is_Record_Type. */
5515
5516 enum tree_code
5517 tree_code_for_record_type (Entity_Id gnat_type)
5518 {
5519 Node_Id component_list, component;
5520
5521 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5522 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
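
/* For instance (an illustrative sketch), with

     type Rec (Discr : Boolean := False) is record
       case Discr is
         when False =>
           X : Integer;
         when True =>
           Y : Float;
       end case;
     end record;
     pragma Unchecked_Union (Rec);

   all non-discriminant fields are in the variant part, so Rec is
   translated into a UNION_TYPE. */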
5523 if (!Is_Unchecked_Union (gnat_type))
5524 return RECORD_TYPE;
5525
5526 gnat_type = Implementation_Base_Type (gnat_type);
5527 component_list
5528 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5529
5530 for (component = First_Non_Pragma (Component_Items (component_list));
5531 Present (component);
5532 component = Next_Non_Pragma (component))
5533 if (Ekind (Defining_Entity (component)) == E_Component)
5534 return RECORD_TYPE;
5535
5536 return UNION_TYPE;
5537 }
5538
5539 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5540 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5541 according to the presence of an alignment clause on the type or, if it
5542 is an array, on the component type. */
5543
5544 bool
5545 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5546 {
5547 gnat_type = Underlying_Type (gnat_type);
5548
5549 *align_clause = Present (Alignment_Clause (gnat_type));
5550
5551 if (Is_Array_Type (gnat_type))
5552 {
5553 gnat_type = Underlying_Type (Component_Type (gnat_type));
5554 if (Present (Alignment_Clause (gnat_type)))
5555 *align_clause = true;
5556 }
5557
5558 if (!Is_Floating_Point_Type (gnat_type))
5559 return false;
5560
5561 if (UI_To_Int (Esize (gnat_type)) != 64)
5562 return false;
5563
5564 return true;
5565 }
5566
5567 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5568 size is greater or equal to 64 bits, or an array of such a type. Set
5569 ALIGN_CLAUSE according to the presence of an alignment clause on the
5570 type or, if it is an array, on the component type. */
5571
5572 bool
5573 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5574 {
5575 gnat_type = Underlying_Type (gnat_type);
5576
5577 *align_clause = Present (Alignment_Clause (gnat_type));
5578
5579 if (Is_Array_Type (gnat_type))
5580 {
5581 gnat_type = Underlying_Type (Component_Type (gnat_type));
5582 if (Present (Alignment_Clause (gnat_type)))
5583 *align_clause = true;
5584 }
5585
5586 if (!Is_Scalar_Type (gnat_type))
5587 return false;
5588
5589 if (UI_To_Int (Esize (gnat_type)) < 64)
5590 return false;
5591
5592 return true;
5593 }
5594
5595 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5596 component of an aggregate type. */
5597
5598 bool
5599 type_for_nonaliased_component_p (tree gnu_type)
5600 {
5601 /* If the type is passed by reference, we may have pointers to the
5602 component so it cannot be made non-aliased. */
5603 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5604 return false;
5605
5606 /* We used to say that any component of aggregate type is aliased
5607 because the front-end may take 'Reference of it. The front-end
5608 has been enhanced in the meantime so as to use a renaming instead
5609 in most cases, but the back-end can probably take the address of
5610 such a component too so we go for the conservative stance.
5611
5612 For instance, we might need the address of any array type, even
5613 if normally passed by copy, to construct a fat pointer if the
5614 component is used as an actual for an unconstrained formal.
5615
5616 Likewise for record types: even if a specific record subtype is
5617 passed by copy, the parent type might be passed by ref (e.g. if
5618 it's of variable size) and we might take the address of a child
5619 component to pass to a parent formal. We have no way to check
5620 for such conditions here. */
5621 if (AGGREGATE_TYPE_P (gnu_type))
5622 return false;
5623
5624 return true;
5625 }
5626
5627 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5628
5629 bool
5630 smaller_form_type_p (tree type, tree orig_type)
5631 {
5632 tree size, osize;
5633
5634 /* We're not interested in variants here. */
5635 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5636 return false;
5637
5638 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5639 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5640 return false;
5641
5642 size = TYPE_SIZE (type);
5643 osize = TYPE_SIZE (orig_type);
5644
5645 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5646 return false;
5647
5648 return tree_int_cst_lt (size, osize) != 0;
5649 }
5650
5651 /* Return whether EXPR, which is the renamed object in an object renaming
5652 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5653 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5654
5655 bool
5656 can_materialize_object_renaming_p (Node_Id expr)
5657 {
5658 while (true)
5659 {
5660 expr = Original_Node (expr);
5661
5662 switch (Nkind (expr))
5663 {
5664 case N_Identifier:
5665 case N_Expanded_Name:
5666 if (!Present (Renamed_Object (Entity (expr))))
5667 return true;
5668 expr = Renamed_Object (Entity (expr));
5669 break;
5670
5671 case N_Selected_Component:
5672 {
5673 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5674 return false;
5675
5676 const Uint bitpos
5677 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5678 if (!UI_Is_In_Int_Range (bitpos)
5679 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5680 return false;
5681
5682 expr = Prefix (expr);
5683 break;
5684 }
5685
5686 case N_Indexed_Component:
5687 case N_Slice:
5688 {
5689 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5690
5691 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5692 return false;
5693
5694 expr = Prefix (expr);
5695 break;
5696 }
5697
5698 case N_Explicit_Dereference:
5699 expr = Prefix (expr);
5700 break;
5701
5702 default:
5703 return true;
5704 }
5705 }
5706 }
5707
5708 /* Perform final processing on global declarations. */
5709
5710 static GTY (()) tree dummy_global;
5711
5712 void
5713 gnat_write_global_declarations (void)
5714 {
5715 unsigned int i;
5716 tree iter;
5717
5718 /* If we have declared types as used at the global level, insert them in
5719 the global hash table. We use a dummy variable for this purpose, but
5720 we need to build it unconditionally to avoid -fcompare-debug issues. */
5721 if (first_global_object_name)
5722 {
5723 struct varpool_node *node;
5724 char *label;
5725
5726 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5727 dummy_global
5728 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5729 void_type_node);
5730 DECL_HARD_REGISTER (dummy_global) = 1;
5731 TREE_STATIC (dummy_global) = 1;
5732 node = varpool_node::get_create (dummy_global);
5733 node->definition = 1;
5734 node->force_output = 1;
5735
5736 if (types_used_by_cur_var_decl)
5737 while (!types_used_by_cur_var_decl->is_empty ())
5738 {
5739 tree t = types_used_by_cur_var_decl->pop ();
5740 types_used_by_var_decl_insert (t, dummy_global);
5741 }
5742 }
5743
5744 /* Output debug information for all global type declarations first. This
5745 ensures that global types whose compilation hasn't been finalized yet,
5746 for example pointers to Taft amendment types, have their compilation
5747 finalized in the right context. */
5748 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5749 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5750 debug_hooks->type_decl (iter, false);
5751
5752 /* Output imported functions. */
5753 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5754 if (TREE_CODE (iter) == FUNCTION_DECL
5755 && DECL_EXTERNAL (iter)
5756 && DECL_INITIAL (iter) == NULL
5757 && !DECL_IGNORED_P (iter)
5758 && DECL_FUNCTION_IS_DEF (iter))
5759 debug_hooks->early_global_decl (iter);
5760
5761 /* Then output the global variables. We need to do that after the debug
5762 information for global types is emitted so that they are finalized. Skip
5763 external global variables, unless we need to emit debug info for them:
5764 this is useful for imported variables, for instance. */
5765 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5766 if (TREE_CODE (iter) == VAR_DECL
5767 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5768 rest_of_decl_compilation (iter, true, 0);
5769
5770 /* Output the imported modules/declarations. In GNAT, these only
5771 materialize subprograms. */
5772 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5773 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5774 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5775 DECL_CONTEXT (iter), false, false);
5776 }
5777
5778 /* ************************************************************************
5779 * * GCC builtins support *
5780 * ************************************************************************ */
5781
5782 /* The general scheme is fairly simple:
5783
5784 For each builtin function/type to be declared, gnat_install_builtins calls
5785 internal facilities which eventually get to gnat_pushdecl, which in turn
5786 tracks the builtin function decls so declared in the 'builtin_decls' global
5787 data structure. When an Intrinsic subprogram declaration is processed, we
5788 search this global data structure to retrieve the associated BUILT_IN DECL
5789 node. */
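
/* For instance (an illustrative sketch), an Ada import such as

     function Sqrt (F : Float) return Float;
     pragma Import (Intrinsic, Sqrt, "__builtin_sqrtf");

   is resolved by looking up "__builtin_sqrtf" among the decls recorded
   here and reusing the corresponding BUILT_IN DECL node. */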
5790
5791 /* Search the chain of currently available builtin declarations for a node
5792 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5793 found, if any, or NULL_TREE otherwise. */
5794 tree
5795 builtin_decl_for (tree name)
5796 {
5797 unsigned i;
5798 tree decl;
5799
5800 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5801 if (DECL_NAME (decl) == name)
5802 return decl;
5803
5804 return NULL_TREE;
5805 }
5806
5807 /* The code below eventually exposes gnat_install_builtins, which declares
5808 the builtin types and functions we might need, either internally or as
5809 user accessible facilities.
5810
5811 ??? This is a first implementation shot, still in rough shape. It is
5812 heavily inspired by the "C" family implementation, with chunks copied
5813 verbatim from there.
5814
5815 Two obvious improvement candidates are:
5816 o Use a more efficient name/decl mapping scheme
5817 o Devise a middle-end infrastructure to avoid having to copy
5818 pieces between front-ends. */
5819
5820 /* ----------------------------------------------------------------------- *
5821 * BUILTIN ELEMENTARY TYPES *
5822 * ----------------------------------------------------------------------- */
5823
5824 /* Standard data types to be used in builtin argument declarations. */
5825
5826 enum c_tree_index
5827 {
5828 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5829 CTI_STRING_TYPE,
5830 CTI_CONST_STRING_TYPE,
5831
5832 CTI_MAX
5833 };
5834
5835 static tree c_global_trees[CTI_MAX];
5836
5837 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5838 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5839 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5840
5841 /* ??? In addition to some attribute handlers, we currently don't support a
5842 (small) number of builtin types, which in turn inhibits support for a
5843 number of builtin functions. */
5844 #define wint_type_node void_type_node
5845 #define intmax_type_node void_type_node
5846 #define uintmax_type_node void_type_node
5847
5848 /* Used to help initialize the builtin-types.def table. When a type of
5849 the correct size doesn't exist, use error_mark_node instead of NULL.
5850 The latter results in segfaults even when a decl using the type doesn't
5851 get invoked. */
5852
5853 static tree
5854 builtin_type_for_size (int size, bool unsignedp)
5855 {
5856 tree type = gnat_type_for_size (size, unsignedp);
5857 return type ? type : error_mark_node;
5858 }
5859
5860 /* Build/push the elementary type decls that builtin functions/types
5861 will need. */
5862
5863 static void
5864 install_builtin_elementary_types (void)
5865 {
5866 signed_size_type_node = gnat_signed_type_for (size_type_node);
5867 pid_type_node = integer_type_node;
5868
5869 string_type_node = build_pointer_type (char_type_node);
5870 const_string_type_node
5871 = build_pointer_type (build_qualified_type
5872 (char_type_node, TYPE_QUAL_CONST));
5873 }
5874
5875 /* ----------------------------------------------------------------------- *
5876 * BUILTIN FUNCTION TYPES *
5877 * ----------------------------------------------------------------------- */
5878
5879 /* Now, builtin function types per se. */
5880
5881 enum c_builtin_type
5882 {
5883 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5884 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5885 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5886 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5887 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5888 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5889 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5890 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5891 ARG6) NAME,
5892 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5893 ARG6, ARG7) NAME,
5894 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5895 ARG6, ARG7, ARG8) NAME,
5896 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5897 ARG6, ARG7, ARG8, ARG9) NAME,
5898 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5899 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5900 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5901 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5902 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5903 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5904 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5905 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5906 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5907 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5908 NAME,
5909 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5910 ARG6) NAME,
5911 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5912 ARG6, ARG7) NAME,
5913 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5914 #include "builtin-types.def"
5915 #include "ada-builtin-types.def"
5916 #undef DEF_PRIMITIVE_TYPE
5917 #undef DEF_FUNCTION_TYPE_0
5918 #undef DEF_FUNCTION_TYPE_1
5919 #undef DEF_FUNCTION_TYPE_2
5920 #undef DEF_FUNCTION_TYPE_3
5921 #undef DEF_FUNCTION_TYPE_4
5922 #undef DEF_FUNCTION_TYPE_5
5923 #undef DEF_FUNCTION_TYPE_6
5924 #undef DEF_FUNCTION_TYPE_7
5925 #undef DEF_FUNCTION_TYPE_8
5926 #undef DEF_FUNCTION_TYPE_9
5927 #undef DEF_FUNCTION_TYPE_10
5928 #undef DEF_FUNCTION_TYPE_11
5929 #undef DEF_FUNCTION_TYPE_VAR_0
5930 #undef DEF_FUNCTION_TYPE_VAR_1
5931 #undef DEF_FUNCTION_TYPE_VAR_2
5932 #undef DEF_FUNCTION_TYPE_VAR_3
5933 #undef DEF_FUNCTION_TYPE_VAR_4
5934 #undef DEF_FUNCTION_TYPE_VAR_5
5935 #undef DEF_FUNCTION_TYPE_VAR_6
5936 #undef DEF_FUNCTION_TYPE_VAR_7
5937 #undef DEF_POINTER_TYPE
5938 BT_LAST
5939 };
5940
5941 typedef enum c_builtin_type builtin_type;
5942
5943 /* A temporary array used in communication with def_fn_type. */
5944 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5945
5946 /* A helper function for install_builtin_function_types. Build function type
5947 for DEF with return type RET and N arguments. If VAR is true, then the
5948 function should be variadic after those N arguments.
5949
5950 Takes special care not to ICE if any of the types involved are
5951 error_mark_node, which indicates that said type is not in fact available
5952 (see builtin_type_for_size), in which case the function type as a whole
5953 should be error_mark_node. */
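
/* For instance (an illustration based on the macros used in
   install_builtin_function_types below), an entry like

     DEF_FUNCTION_TYPE_1 (BT_FN_INT_INT, BT_INT, BT_INT)

   in builtin-types.def expands to

     def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1, BT_INT);

   which records the function type "int (int)" in builtin_types. */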
5954
5955 static void
5956 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5957 {
5958 tree t;
5959 tree *args = XALLOCAVEC (tree, n);
5960 va_list list;
5961 int i;
5962
5963 va_start (list, n);
5964 for (i = 0; i < n; ++i)
5965 {
5966 builtin_type a = (builtin_type) va_arg (list, int);
5967 t = builtin_types[a];
5968 if (t == error_mark_node)
5969 goto egress;
5970 args[i] = t;
5971 }
5972
5973 t = builtin_types[ret];
5974 if (t == error_mark_node)
5975 goto egress;
5976 if (var)
5977 t = build_varargs_function_type_array (t, n, args);
5978 else
5979 t = build_function_type_array (t, n, args);
5980
5981 egress:
5982 builtin_types[def] = t;
5983 va_end (list);
5984 }
5985
5986 /* Build the builtin function types and install them in the builtin_types
5987 array for later use in builtin function decls. */
5988
5989 static void
5990 install_builtin_function_types (void)
5991 {
5992 tree va_list_ref_type_node;
5993 tree va_list_arg_type_node;
5994
5995 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5996 {
5997 va_list_arg_type_node = va_list_ref_type_node =
5998 build_pointer_type (TREE_TYPE (va_list_type_node));
5999 }
6000 else
6001 {
6002 va_list_arg_type_node = va_list_type_node;
6003 va_list_ref_type_node = build_reference_type (va_list_type_node);
6004 }
6005
6006 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6007 builtin_types[ENUM] = VALUE;
6008 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6009 def_fn_type (ENUM, RETURN, 0, 0);
6010 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6011 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6012 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6013 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6014 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6015 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6016 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6017 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6018 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6019 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6020 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6021 ARG6) \
6022 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6023 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6024 ARG6, ARG7) \
6025 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6026 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6027 ARG6, ARG7, ARG8) \
6028 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6029 ARG7, ARG8);
6030 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6031 ARG6, ARG7, ARG8, ARG9) \
6032 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6033 ARG7, ARG8, ARG9);
6034 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6035 ARG6, ARG7, ARG8, ARG9, ARG10) \
6036 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6037 ARG7, ARG8, ARG9, ARG10);
6038 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
6039 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
6040 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6041 ARG7, ARG8, ARG9, ARG10, ARG11);
6042 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6043 def_fn_type (ENUM, RETURN, 1, 0);
6044 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6045 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6046 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6047 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6048 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6049 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6050 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6051 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6052 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6053 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6054 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6055 ARG6) \
6056 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6057 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6058 ARG6, ARG7) \
6059 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6060 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6061 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6062
6063 #include "builtin-types.def"
6064 #include "ada-builtin-types.def"
6065
6066 #undef DEF_PRIMITIVE_TYPE
6067 #undef DEF_FUNCTION_TYPE_0
6068 #undef DEF_FUNCTION_TYPE_1
6069 #undef DEF_FUNCTION_TYPE_2
6070 #undef DEF_FUNCTION_TYPE_3
6071 #undef DEF_FUNCTION_TYPE_4
6072 #undef DEF_FUNCTION_TYPE_5
6073 #undef DEF_FUNCTION_TYPE_6
6074 #undef DEF_FUNCTION_TYPE_7
6075 #undef DEF_FUNCTION_TYPE_8
6076 #undef DEF_FUNCTION_TYPE_9
6077 #undef DEF_FUNCTION_TYPE_10
6078 #undef DEF_FUNCTION_TYPE_11
6079 #undef DEF_FUNCTION_TYPE_VAR_0
6080 #undef DEF_FUNCTION_TYPE_VAR_1
6081 #undef DEF_FUNCTION_TYPE_VAR_2
6082 #undef DEF_FUNCTION_TYPE_VAR_3
6083 #undef DEF_FUNCTION_TYPE_VAR_4
6084 #undef DEF_FUNCTION_TYPE_VAR_5
6085 #undef DEF_FUNCTION_TYPE_VAR_6
6086 #undef DEF_FUNCTION_TYPE_VAR_7
6087 #undef DEF_POINTER_TYPE
6088 builtin_types[(int) BT_LAST] = NULL_TREE;
6089 }
6090
6091 /* ----------------------------------------------------------------------- *
6092 * BUILTIN ATTRIBUTES *
6093 * ----------------------------------------------------------------------- */
6094
6095 enum built_in_attribute
6096 {
6097 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
6098 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
6099 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
6100 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
6101 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
6102 #include "builtin-attrs.def"
6103 #undef DEF_ATTR_NULL_TREE
6104 #undef DEF_ATTR_INT
6105 #undef DEF_ATTR_STRING
6106 #undef DEF_ATTR_IDENT
6107 #undef DEF_ATTR_TREE_LIST
6108 ATTR_LAST
6109 };
6110
6111 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
6112
6113 static void
6114 install_builtin_attributes (void)
6115 {
6116 /* Fill in the built_in_attributes array. */
6117 #define DEF_ATTR_NULL_TREE(ENUM) \
6118 built_in_attributes[(int) ENUM] = NULL_TREE;
6119 #define DEF_ATTR_INT(ENUM, VALUE) \
6120 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6121 #define DEF_ATTR_STRING(ENUM, VALUE) \
6122 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6123 #define DEF_ATTR_IDENT(ENUM, STRING) \
6124 built_in_attributes[(int) ENUM] = get_identifier (STRING);
6125 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
6126 built_in_attributes[(int) ENUM] \
6127 = tree_cons (built_in_attributes[(int) PURPOSE], \
6128 built_in_attributes[(int) VALUE], \
6129 built_in_attributes[(int) CHAIN]);
6130 #include "builtin-attrs.def"
6131 #undef DEF_ATTR_NULL_TREE
6132 #undef DEF_ATTR_INT
6133 #undef DEF_ATTR_STRING
6134 #undef DEF_ATTR_IDENT
6135 #undef DEF_ATTR_TREE_LIST
6136 }
6137
6138 /* Handle a "const" attribute; arguments as in
6139 struct attribute_spec.handler. */
6140
6141 static tree
6142 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6143 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6144 bool *no_add_attrs)
6145 {
6146 if (TREE_CODE (*node) == FUNCTION_DECL)
6147 TREE_READONLY (*node) = 1;
6148 else
6149 *no_add_attrs = true;
6150
6151 return NULL_TREE;
6152 }
6153
6154 /* Handle a "nothrow" attribute; arguments as in
6155 struct attribute_spec.handler. */
6156
6157 static tree
6158 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6159 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6160 bool *no_add_attrs)
6161 {
6162 if (TREE_CODE (*node) == FUNCTION_DECL)
6163 TREE_NOTHROW (*node) = 1;
6164 else
6165 *no_add_attrs = true;
6166
6167 return NULL_TREE;
6168 }
6169
6170 /* Handle a "pure" attribute; arguments as in
6171 struct attribute_spec.handler. */
6172
6173 static tree
6174 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6175 int ARG_UNUSED (flags), bool *no_add_attrs)
6176 {
6177 if (TREE_CODE (*node) == FUNCTION_DECL)
6178 DECL_PURE_P (*node) = 1;
6179 /* TODO: support types. */
6180 else
6181 {
6182 warning (OPT_Wattributes, "%qs attribute ignored",
6183 IDENTIFIER_POINTER (name));
6184 *no_add_attrs = true;
6185 }
6186
6187 return NULL_TREE;
6188 }
6189
6190 /* Handle a "no vops" attribute; arguments as in
6191 struct attribute_spec.handler. */
6192
6193 static tree
6194 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6195 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6196 bool *ARG_UNUSED (no_add_attrs))
6197 {
6198 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6199 DECL_IS_NOVOPS (*node) = 1;
6200 return NULL_TREE;
6201 }
6202
6203 /* Helper for nonnull attribute handling; fetch the operand number
6204 from the attribute argument list. */
6205
6206 static bool
6207 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6208 {
6209 /* Verify the arg number is a constant. */
6210 if (!tree_fits_uhwi_p (arg_num_expr))
6211 return false;
6212
6213 *valp = TREE_INT_CST_LOW (arg_num_expr);
6214 return true;
6215 }
6216
6217 /* Handle the "nonnull" attribute. */
6218 static tree
6219 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6220 tree args, int ARG_UNUSED (flags),
6221 bool *no_add_attrs)
6222 {
6223 tree type = *node;
6224 unsigned HOST_WIDE_INT attr_arg_num;
6225
6226 /* If no arguments are specified, all pointer arguments should be
6227 non-null. Verify a full prototype is given so that the arguments
6228 will have the correct types when we actually check them later.
6229 Avoid diagnosing type-generic built-ins since those have no
6230 prototype. */
6231 if (!args)
6232 {
6233 if (!prototype_p (type)
6234 && (!TYPE_ATTRIBUTES (type)
6235 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6236 {
6237 error ("nonnull attribute without arguments on a non-prototype");
6238 *no_add_attrs = true;
6239 }
6240 return NULL_TREE;
6241 }
6242
6243 /* Argument list specified. Verify that each argument number references
6244 a pointer argument. */
6245 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6246 {
6247 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6248
6249 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6250 {
6251 error ("nonnull argument has invalid operand number (argument %lu)",
6252 (unsigned long) attr_arg_num);
6253 *no_add_attrs = true;
6254 return NULL_TREE;
6255 }
6256
6257 if (prototype_p (type))
6258 {
6259 function_args_iterator iter;
6260 tree argument;
6261
6262 function_args_iter_init (&iter, type);
6263 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6264 {
6265 argument = function_args_iter_cond (&iter);
6266 if (!argument || ck_num == arg_num)
6267 break;
6268 }
6269
6270 if (!argument
6271 || TREE_CODE (argument) == VOID_TYPE)
6272 {
6273 error ("nonnull argument with out-of-range operand number "
6274 "(argument %lu, operand %lu)",
6275 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6276 *no_add_attrs = true;
6277 return NULL_TREE;
6278 }
6279
6280 if (TREE_CODE (argument) != POINTER_TYPE)
6281 {
6282 error ("nonnull argument references non-pointer operand "
6283 "(argument %lu, operand %lu)",
6284 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6285 *no_add_attrs = true;
6286 return NULL_TREE;
6287 }
6288 }
6289 }
6290
6291 return NULL_TREE;
6292 }
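
/* In C terms (hypothetical user code, for illustration only), the checks
   above accept

     extern void *my_copy (void *dst, const void *src, __SIZE_TYPE__ n)
       __attribute__ ((nonnull (1, 2)));

   since operands are 1-based parameter indices that must designate
   pointers, and reject out-of-range or non-pointer operand numbers.  */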
6293
6294 /* Handle a "sentinel" attribute. */
6295
6296 static tree
6297 handle_sentinel_attribute (tree *node, tree name, tree args,
6298 int ARG_UNUSED (flags), bool *no_add_attrs)
6299 {
6300 if (!prototype_p (*node))
6301 {
6302 warning (OPT_Wattributes,
6303 "%qs attribute requires prototypes with named arguments",
6304 IDENTIFIER_POINTER (name));
6305 *no_add_attrs = true;
6306 }
6307 else
6308 {
6309 if (!stdarg_p (*node))
6310 {
6311 warning (OPT_Wattributes,
6312 "%qs attribute only applies to variadic functions",
6313 IDENTIFIER_POINTER (name));
6314 *no_add_attrs = true;
6315 }
6316 }
6317
6318 if (args)
6319 {
6320 tree position = TREE_VALUE (args);
6321
6322 if (TREE_CODE (position) != INTEGER_CST)
6323 {
6324 warning (0, "requested position is not an integer constant");
6325 *no_add_attrs = true;
6326 }
6327 else
6328 {
6329 if (tree_int_cst_lt (position, integer_zero_node))
6330 {
6331 warning (0, "requested position is less than zero");
6332 *no_add_attrs = true;
6333 }
6334 }
6335 }
6336
6337 return NULL_TREE;
6338 }
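
/* In C terms (hypothetical user code, for illustration only):

     extern void my_execl (const char *path, ...)
       __attribute__ ((sentinel));

   requires calls to pass a null pointer as last argument; the optional
   position argument checked above shifts the expected sentinel that many
   places back from the end of the argument list.  */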
6339
6340 /* Handle a "noreturn" attribute; arguments as in
6341 struct attribute_spec.handler. */
6342
6343 static tree
6344 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6345 int ARG_UNUSED (flags), bool *no_add_attrs)
6346 {
6347 tree type = TREE_TYPE (*node);
6348
6349 /* See FIXME comment in c_common_attribute_table. */
6350 if (TREE_CODE (*node) == FUNCTION_DECL)
6351 TREE_THIS_VOLATILE (*node) = 1;
6352 else if (TREE_CODE (type) == POINTER_TYPE
6353 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6354 TREE_TYPE (*node)
6355 = build_pointer_type
6356 (change_qualified_type (TREE_TYPE (type), TYPE_QUAL_VOLATILE));
6357 else
6358 {
6359 warning (OPT_Wattributes, "%qs attribute ignored",
6360 IDENTIFIER_POINTER (name));
6361 *no_add_attrs = true;
6362 }
6363
6364 return NULL_TREE;
6365 }
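
/* In C terms (hypothetical user code, for illustration only), both forms
   below are covered: the first by setting TREE_THIS_VOLATILE on the decl,
   the second by volatile-qualifying the pointed-to function type:

     extern void my_fatal (void) __attribute__ ((noreturn));
     void (*my_handler) (void) __attribute__ ((noreturn));  */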
6366
6367 /* Handle a "stack_protect" attribute; arguments as in
6368 struct attribute_spec.handler. */
6369
6370 static tree
6371 handle_stack_protect_attribute (tree *node, tree name, tree, int,
6372 bool *no_add_attrs)
6373 {
6374 if (TREE_CODE (*node) != FUNCTION_DECL)
6375 {
6376 warning (OPT_Wattributes, "%qE attribute ignored", name);
6377 *no_add_attrs = true;
6378 }
6379
6380 return NULL_TREE;
6381 }
6382
6383 /* Handle a "noinline" attribute; arguments as in
6384 struct attribute_spec.handler. */
6385
6386 static tree
6387 handle_noinline_attribute (tree *node, tree name,
6388 tree ARG_UNUSED (args),
6389 int ARG_UNUSED (flags), bool *no_add_attrs)
6390 {
6391 if (TREE_CODE (*node) == FUNCTION_DECL)
6392 {
6393 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6394 {
6395 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6396 "with attribute %qs", name, "always_inline");
6397 *no_add_attrs = true;
6398 }
6399 else
6400 DECL_UNINLINABLE (*node) = 1;
6401 }
6402 else
6403 {
6404 warning (OPT_Wattributes, "%qE attribute ignored", name);
6405 *no_add_attrs = true;
6406 }
6407
6408 return NULL_TREE;
6409 }
6410
6411 /* Handle a "noclone" attribute; arguments as in
6412 struct attribute_spec.handler. */
6413
6414 static tree
6415 handle_noclone_attribute (tree *node, tree name,
6416 tree ARG_UNUSED (args),
6417 int ARG_UNUSED (flags), bool *no_add_attrs)
6418 {
6419 if (TREE_CODE (*node) != FUNCTION_DECL)
6420 {
6421 warning (OPT_Wattributes, "%qE attribute ignored", name);
6422 *no_add_attrs = true;
6423 }
6424
6425 return NULL_TREE;
6426 }
6427
6428 /* Handle a "no_icf" attribute; arguments as in
6429 struct attribute_spec.handler. */
6430
6431 static tree
6432 handle_noicf_attribute (tree *node, tree name,
6433 tree ARG_UNUSED (args),
6434 int ARG_UNUSED (flags), bool *no_add_attrs)
6435 {
6436 if (TREE_CODE (*node) != FUNCTION_DECL)
6437 {
6438 warning (OPT_Wattributes, "%qE attribute ignored", name);
6439 *no_add_attrs = true;
6440 }
6441
6442 return NULL_TREE;
6443 }
6444
6445 /* Handle a "noipa" attribute; arguments as in
6446 struct attribute_spec.handler. */
6447
6448 static tree
6449 handle_noipa_attribute (tree *node, tree name, tree, int, bool *no_add_attrs)
6450 {
6451 if (TREE_CODE (*node) != FUNCTION_DECL)
6452 {
6453 warning (OPT_Wattributes, "%qE attribute ignored", name);
6454 *no_add_attrs = true;
6455 }
6456
6457 return NULL_TREE;
6458 }
6459
6460 /* Handle a "leaf" attribute; arguments as in
6461 struct attribute_spec.handler. */
6462
6463 static tree
6464 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6465 int ARG_UNUSED (flags), bool *no_add_attrs)
6466 {
6467 if (TREE_CODE (*node) != FUNCTION_DECL)
6468 {
6469 warning (OPT_Wattributes, "%qE attribute ignored", name);
6470 *no_add_attrs = true;
6471 }
6472 if (!TREE_PUBLIC (*node))
6473 {
6474 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6475 *no_add_attrs = true;
6476 }
6477
6478 return NULL_TREE;
6479 }
6480
6481 /* Handle an "always_inline" attribute; arguments as in
6482 struct attribute_spec.handler. */
6483
6484 static tree
6485 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6486 int ARG_UNUSED (flags), bool *no_add_attrs)
6487 {
6488 if (TREE_CODE (*node) == FUNCTION_DECL)
6489 {
6490 /* Set the attribute and mark it for disregarding inline limits. */
6491 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6492 }
6493 else
6494 {
6495 warning (OPT_Wattributes, "%qE attribute ignored", name);
6496 *no_add_attrs = true;
6497 }
6498
6499 return NULL_TREE;
6500 }
6501
6502 /* Handle a "malloc" attribute; arguments as in
6503 struct attribute_spec.handler. */
6504
6505 static tree
6506 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6507 int ARG_UNUSED (flags), bool *no_add_attrs)
6508 {
6509 if (TREE_CODE (*node) == FUNCTION_DECL
6510 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6511 DECL_IS_MALLOC (*node) = 1;
6512 else
6513 {
6514 warning (OPT_Wattributes, "%qs attribute ignored",
6515 IDENTIFIER_POINTER (name));
6516 *no_add_attrs = true;
6517 }
6518
6519 return NULL_TREE;
6520 }
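
/* In C terms (hypothetical user code, for illustration only), the
   pointer-return check above accepts

     extern void *my_alloc (__SIZE_TYPE__) __attribute__ ((malloc));

   whereas "malloc" on a function returning a non-pointer only draws the
   "attribute ignored" warning.  */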
6521
6522 /* Fake handler for attributes we don't properly support. */
6523
6524 tree
6525 fake_attribute_handler (tree * ARG_UNUSED (node),
6526 tree ARG_UNUSED (name),
6527 tree ARG_UNUSED (args),
6528 int ARG_UNUSED (flags),
6529 bool * ARG_UNUSED (no_add_attrs))
6530 {
6531 return NULL_TREE;
6532 }
6533
6534 /* Handle a "type_generic" attribute. */
6535
6536 static tree
6537 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6538 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6539 bool * ARG_UNUSED (no_add_attrs))
6540 {
6541 /* Ensure we have a function type. */
6542 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6543
6544 /* Ensure we have a variadic function. */
6545 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6546
6547 return NULL_TREE;
6548 }
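
/* This attribute is used for builtins such as __builtin_fpclassify whose
   argument types are only resolved at the call site, hence the assertions
   above: such functions are declared either without a prototype or with a
   purely variadic one.  */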
6549
6550 /* Handle a "flatten" attribute; arguments as in
6551 struct attribute_spec.handler. */
6552
6553 static tree
6554 handle_flatten_attribute (tree *node, tree name,
6555 tree args ATTRIBUTE_UNUSED,
6556 int flags ATTRIBUTE_UNUSED, bool *no_add_attrs)
6557 {
6558 if (TREE_CODE (*node) == FUNCTION_DECL)
6559 /* Do nothing else, just set the attribute. We'll get at
6560 it later with lookup_attribute. */
6561 ;
6562 else
6563 {
6564 warning (OPT_Wattributes, "%qE attribute ignored", name);
6565 *no_add_attrs = true;
6566 }
6567
6568 return NULL_TREE;
6569 }
6570
6571 /* Handle a "used" attribute; arguments as in
6572 struct attribute_spec.handler. */
6573
6574 static tree
6575 handle_used_attribute (tree *pnode, tree name, tree ARG_UNUSED (args),
6576 int ARG_UNUSED (flags), bool *no_add_attrs)
6577 {
6578 tree node = *pnode;
6579
6580 if (TREE_CODE (node) == FUNCTION_DECL
6581 || (VAR_P (node) && TREE_STATIC (node))
6582 || (TREE_CODE (node) == TYPE_DECL))
6583 {
6584 TREE_USED (node) = 1;
6585 DECL_PRESERVE_P (node) = 1;
6586 if (VAR_P (node))
6587 DECL_READ_P (node) = 1;
6588 }
6589 else
6590 {
6591 warning (OPT_Wattributes, "%qE attribute ignored", name);
6592 *no_add_attrs = true;
6593 }
6594
6595 return NULL_TREE;
6596 }
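
/* In C terms (hypothetical user code, for illustration only):

     static int build_stamp __attribute__ ((used));

   sets DECL_PRESERVE_P as above, so the object is emitted even if nothing
   in the translation unit references it.  */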
6597
6598 /* Handle a "cold" attribute; arguments as in
6599 struct attribute_spec.handler. */
6600
6601 static tree
6602 handle_cold_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6603 int ARG_UNUSED (flags), bool *no_add_attrs)
6604 {
6605 if (TREE_CODE (*node) == FUNCTION_DECL
6606 || TREE_CODE (*node) == LABEL_DECL)
6607 {
6608 /* Attribute cold processing is done later with lookup_attribute. */
6609 }
6610 else
6611 {
6612 warning (OPT_Wattributes, "%qE attribute ignored", name);
6613 *no_add_attrs = true;
6614 }
6615
6616 return NULL_TREE;
6617 }
6618
6619 /* Handle a "hot" attribute; arguments as in
6620 struct attribute_spec.handler. */
6621
6622 static tree
6623 handle_hot_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6624 int ARG_UNUSED (flags), bool *no_add_attrs)
6625 {
6626 if (TREE_CODE (*node) == FUNCTION_DECL
6627 || TREE_CODE (*node) == LABEL_DECL)
6628 {
6629 /* Attribute hot processing is done later with lookup_attribute. */
6630 }
6631 else
6632 {
6633 warning (OPT_Wattributes, "%qE attribute ignored", name);
6634 *no_add_attrs = true;
6635 }
6636
6637 return NULL_TREE;
6638 }
6639
6640 /* Handle a "target" attribute. */
6641
6642 static tree
6643 handle_target_attribute (tree *node, tree name, tree args, int flags,
6644 bool *no_add_attrs)
6645 {
6646 /* Ensure we have a function declaration. */
6647 if (TREE_CODE (*node) != FUNCTION_DECL)
6648 {
6649 warning (OPT_Wattributes, "%qE attribute ignored", name);
6650 *no_add_attrs = true;
6651 }
6652 else if (lookup_attribute ("target_clones", DECL_ATTRIBUTES (*node)))
6653 {
6654 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6655 "with %qs attribute", name, "target_clones");
6656 *no_add_attrs = true;
6657 }
6658 else if (!targetm.target_option.valid_attribute_p (*node, name, args, flags))
6659 *no_add_attrs = true;
6660
6661 /* Check that no value of the attribute is an empty string. */
6662 for (tree t = args; t != NULL_TREE; t = TREE_CHAIN (t))
6663 {
6664 tree value = TREE_VALUE (t);
6665 if (TREE_CODE (value) == STRING_CST
6666 && TREE_STRING_LENGTH (value) == 1
6667 && TREE_STRING_POINTER (value)[0] == '\0')
6668 {
6669 warning (OPT_Wattributes, "empty string in attribute %<target%>");
6670 *no_add_attrs = true;
6671 }
6672 }
6673
6674 return NULL_TREE;
6675 }
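
/* In C terms (hypothetical user code, for illustration only, assuming an
   x86 target):

     extern int my_crc (const char *) __attribute__ ((target ("sse4.2")));

   The option string is validated by the target hook above, and an empty
   string is diagnosed by the loop that follows it.  */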
6676
6677 /* Handle a "target_clones" attribute. */
6678
6679 static tree
6680 handle_target_clones_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6681 int ARG_UNUSED (flags), bool *no_add_attrs)
6682 {
6683 /* Ensure we have a function declaration. */
6684 if (TREE_CODE (*node) == FUNCTION_DECL)
6685 {
6686 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6687 {
6688 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6689 "with %qs attribute", name, "always_inline");
6690 *no_add_attrs = true;
6691 }
6692 else if (lookup_attribute ("target", DECL_ATTRIBUTES (*node)))
6693 {
6694 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6695 "with %qs attribute", name, "target");
6696 *no_add_attrs = true;
6697 }
6698 else
6699 /* Do not inline functions with multiple clone targets. */
6700 DECL_UNINLINABLE (*node) = 1;
6701 }
6702 else
6703 {
6704 warning (OPT_Wattributes, "%qE attribute ignored", name);
6705 *no_add_attrs = true;
6706 }
6707 return NULL_TREE;
6708 }
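
/* In C terms (hypothetical user code, for illustration only, assuming an
   x86 target):

     extern long my_dot (int *, int *, int)
       __attribute__ ((target_clones ("avx2", "default")));

   requests one clone per listed target plus dispatch code; as enforced
   above, such functions are never inlined.  */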
6709
6710 /* Handle a "vector_size" attribute; arguments as in
6711 struct attribute_spec.handler. */
6712
6713 static tree
6714 handle_vector_size_attribute (tree *node, tree name, tree args,
6715 int ARG_UNUSED (flags), bool *no_add_attrs)
6716 {
6717 tree type = *node;
6718 tree vector_type;
6719
6720 *no_add_attrs = true;
6721
6722 /* We need to provide for vector pointers, vector arrays, and
6723 functions returning vectors. For example:
6724
6725 __attribute__((vector_size(16))) short *foo;
6726
6727 In this case, the mode is SI, but the type being modified is
6728 HI, so we need to look further. */
6729 while (POINTER_TYPE_P (type)
6730 || TREE_CODE (type) == FUNCTION_TYPE
6731 || TREE_CODE (type) == ARRAY_TYPE)
6732 type = TREE_TYPE (type);
6733
6734 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6735 if (!vector_type)
6736 return NULL_TREE;
6737
6738 /* Build back pointers if needed. */
6739 *node = reconstruct_complex_type (*node, vector_type);
6740
6741 return NULL_TREE;
6742 }
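
/* For the declaration shown in the comment above, the loop strips the
   pointer type to reach "short", build_vector_type_for_size builds the
   16-byte vector type with 8 elements from it, and reconstruct_complex_type
   then rebuilds the pointer so that "foo" ends up pointing to that vector
   type.  */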
6743
6744 /* Handle a "vector_type" attribute; arguments as in
6745 struct attribute_spec.handler. */
6746
6747 static tree
6748 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6749 int ARG_UNUSED (flags), bool *no_add_attrs)
6750 {
6751 tree type = *node;
6752 tree vector_type;
6753
6754 *no_add_attrs = true;
6755
6756 if (TREE_CODE (type) != ARRAY_TYPE)
6757 {
6758 error ("attribute %qs applies to array types only",
6759 IDENTIFIER_POINTER (name));
6760 return NULL_TREE;
6761 }
6762
6763 vector_type = build_vector_type_for_array (type, name);
6764 if (!vector_type)
6765 return NULL_TREE;
6766
6767 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6768 *node = vector_type;
6769
6770 return NULL_TREE;
6771 }
6772
6773 /* ----------------------------------------------------------------------- *
6774 * BUILTIN FUNCTIONS *
6775 * ----------------------------------------------------------------------- */
6776
6777 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6778 names. NONANSI_P is unused here: unlike the C family of front ends, the
6779 Ada front end has no -fno-builtin or -fno-nonansi-builtin machinery. */
6780
6781 static void
6782 def_builtin_1 (enum built_in_function fncode,
6783 const char *name,
6784 enum built_in_class fnclass,
6785 tree fntype, tree libtype,
6786 bool both_p, bool fallback_p,
6787 bool nonansi_p ATTRIBUTE_UNUSED,
6788 tree fnattrs, bool implicit_p)
6789 {
6790 tree decl;
6791 const char *libname;
6792
6793 /* Preserve an already installed decl. It was most likely set up in advance
6794 (e.g. as part of the internal builtins) for specific reasons. */
6795 if (builtin_decl_explicit (fncode))
6796 return;
6797
6798 if (fntype == error_mark_node)
6799 return;
6800
6801 gcc_assert ((!both_p && !fallback_p)
6802 || !strncmp (name, "__builtin_",
6803 strlen ("__builtin_")));
6804
6805 libname = name + strlen ("__builtin_");
6806 decl = add_builtin_function (name, fntype, fncode, fnclass,
6807 (fallback_p ? libname : NULL),
6808 fnattrs);
6809 if (both_p)
6810 /* ??? This is normally further controlled by command-line options
6811 like -fno-builtin, but we don't have them for Ada. */
6812 add_builtin_function (libname, libtype, fncode, fnclass,
6813 NULL, fnattrs);
6814
6815 set_builtin_decl (fncode, decl, implicit_p);
6816 }
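
/* For illustration: invoked for BUILT_IN_MEMCPY with both_p set, the
   worker above registers "__builtin_memcpy" and, under the stripped name
   "memcpy", the library fallback, so both spellings resolve to the same
   builtin.  */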
6817
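/* Dummy flags describing the ISO C dialect in effect.  They are referenced
   by the conditions in builtins.def; the Ada front end has no C dialect,
   so they are simply 0.  */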
6818 static int flag_isoc94 = 0;
6819 static int flag_isoc99 = 0;
6820 static int flag_isoc11 = 0;
6821
6822 /* Install what the common builtins.def offers plus our local additions.
6823
6824 Note that ada-builtins.def is included first so that locally redefined
6825 built-in functions take precedence over the commonly defined ones. */
6826
6827 static void
6828 install_builtin_functions (void)
6829 {
6830 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6831 NONANSI_P, ATTRS, IMPLICIT, COND) \
6832 if (NAME && COND) \
6833 def_builtin_1 (ENUM, NAME, CLASS, \
6834 builtin_types[(int) TYPE], \
6835 builtin_types[(int) LIBTYPE], \
6836 BOTH_P, FALLBACK_P, NONANSI_P, \
6837 built_in_attributes[(int) ATTRS], IMPLICIT);
6838 #define DEF_ADA_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
6839 DEF_BUILTIN (ENUM, "__builtin_" NAME, BUILT_IN_FRONTEND, TYPE, BT_LAST, \
6840 false, false, false, ATTRS, true, true)
6841 #include "ada-builtins.def"
6842 #include "builtins.def"
6843 }
6844
6849 /* Install the builtin functions we might need. */
6850
6851 void
6852 gnat_install_builtins (void)
6853 {
6854 install_builtin_elementary_types ();
6855 install_builtin_function_types ();
6856 install_builtin_attributes ();
6857
6858 /* Install builtins used by generic middle-end pieces first. Some of these
6859 know about internal specifics and set attributes accordingly, for
6860 instance __builtin_alloca vs. no-throw and -fstack-check; the generic
6861 definitions from builtins.def are then ignored for those. */
6862 build_common_builtin_nodes ();
6863
6864 /* Now, install the target specific builtins, such as the AltiVec family on
6865 ppc, and the common set as exposed by builtins.def. */
6866 targetm.init_builtins ();
6867 install_builtin_functions ();
6868 }
6869
6870 #include "gt-ada-utils.h"
6871 #include "gtype-ada.h"