Make mode_for_size_tree return an opt_mode
gcc/ada/gcc-interface/utils.c
/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2017, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion. GNAT is distributed in the hope that it will be useful, but WITH-  *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License  *
 * for more details. You should have received a copy of the GNU General     *
 * Public License along with GCC; see the file COPYING3. If not see         *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level. */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped. */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped. */
int double_scalar_alignment;

/* True if floating-point arithmetics may use wider intermediate results. */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create. */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons. */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons. */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes. */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry. */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada. We support
   this minimal set of attributes to accommodate the needs of builtins. */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "noinline",     0, 0,  true,  false, false, handle_noinline_attribute,
    false },
  { "noclone",      0, 0,  true,  false, false, handle_noclone_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "always_inline",0, 0,  true,  false, false, handle_always_inline_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents. */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,         0, 0, false, false, false, NULL, false }
};

/* Associates a GNAT tree node to a GCC tree node. It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
   of `save_gnu_tree' for more info. */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once. Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special. Perhaps we should NOT share these types; we'll see how it
   goes later. */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode. */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth. */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level). */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level. */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context. */
  tree jmpbuf_decl;
};

/* The binding level currently in effect. */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse. */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations. */
static GTY(()) tree global_context;

/* An array of global declarations. */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations. */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes. */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types. It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference. */

struct GTY((for_user)) pad_type_hash {
  unsigned long hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);

  static int
  keep_cache_entry (pad_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree fold_bit_position (const_tree);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes. */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on. */
  tree decl;

  /* The corresponding entity's Scope. */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed. */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to. */
  vec<tree> types;

  /* The next queue item. */
  struct deferred_decl_context_node *next;
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE. */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed. */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
                                       tree type);
\f
/* Initialize data structures of the utils.c module. */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees. */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies. */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types. */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module. */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees. */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies. */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types. */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}
\f
/* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association. */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl. If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem. */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
   that was associated with it. If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity. */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
\f
/* Make a dummy type corresponding to GNAT_TYPE. */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type, debug_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type. */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use that one. Else make one. */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE. */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
                        ? tree_code_for_record_type (gnat_equiv)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  /* Create a debug type so that debug info consumers only see an unspecified
     type. */
  if (Needs_Debug_Info (gnat_type))
    {
      debug_type = make_node (LANG_TYPE);
      SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);

      TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
      TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
    }

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any. */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl. */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed. */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
\f
/* Return true if we are in the global binding level. */

bool
global_bindings_p (void)
{
  return force_global || !current_function_decl;
}

/* Enter a new binding level. */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one. */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one. */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent. */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels. */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK. */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL. */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level. */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level. Set any BLOCK into the current code group. */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything. Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block; otherwise, add it to the list of its
     parent. */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure. */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK. */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure. */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
\f
/* Set the context of TYPE and its parallel types (if any) to CONTEXT. */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
         Some parallel types seem to be present in multiple parallel type
         chains, so don't mess with their context if they already have
         one. */
      if (!TYPE_CONTEXT (parallel_type))
        {
          if (TYPE_STUB_DECL (parallel_type))
            DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
          TYPE_CONTEXT (parallel_type) = context;
        }

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for debug info purposes, or Empty if there is no such scope. If not
   NULL, set IS_SUBPROGRAM to whether the returned entity is a
   subprogram. */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
        {
        case E_Function:
        case E_Procedure:
          if (Present (Protected_Body_Subprogram (gnat_entity)))
            gnat_entity = Protected_Body_Subprogram (gnat_entity);

          /* If the scope is a subprogram, then just rely on
             current_function_decl, so that we don't have to defer
             anything. This is needed because other places rely on the
             validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes. */
          if (is_subprogram)
            *is_subprogram = true;
          return gnat_entity;

        case E_Record_Type:
        case E_Record_Subtype:
          return gnat_entity;

        default:
          /* By default, we are not interested in this particular scope: go to
             the outer one. */
          break;
        }

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
   of N otherwise. */

static void
defer_or_set_type_context (tree type, tree context,
                           struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, but create it first if need be. */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context
        = build_translation_unit_decl (get_identifier (main_input_filename));
      debug_hooks->register_main_translation_unit (global_context);
    }

  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation. */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation. */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to the current_function_decl if
         the GNAT scope reached the global scope, if it reached a subprogram
         or the declaration is a subprogram or a variable (for them we skip
         intermediate context types because the subprogram body elaboration
         machinery and the inliner both expect a subprogram context).

         Falling back to current_function_decl is necessary for implicit
         subprograms created by gigi, such as the elaboration subprograms. */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
          && !context_is_subprogram
          && TREE_CODE (decl) != FUNCTION_DECL
          && TREE_CODE (decl) != VAR_DECL)
        /* Always assume the scope has not been elaborated, thus defer the
           context propagation to the time its elaboration will be
           available. */
        deferred_decl_context
          = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
         local context. */
      else if (current_function_decl && force_global == 0)
        context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help in determining the context, use the
     global scope. */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it. */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
          || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it. */
  if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list. The list of declarations is in reverse
     order. The list will be reversed later. Put global declarations in the
     globals list and local ones in the current block. But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway. */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      /* External declarations must go to the binding level they belong to.
         This ensures that corresponding imported entities are available in
         the debugger at the proper time. */
      if (DECL_EXTERNAL (decl)
          && TREE_CODE (decl) == FUNCTION_DECL
          && DECL_BUILT_IN (decl))
        vec_safe_push (builtin_decls, decl);
      else if (global_bindings_p ())
        vec_safe_push (global_decls, decl);
      else
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names. */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
         to generate a typedef in DWARF for them and make sure it is preserved,
         unless the type is artificial. */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
          && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
              || DECL_ARTIFICIAL (decl)))
        ;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
         generate the typedef in DWARF. Also do that for fat pointer types
         because, even though they are tagged types in the C sense, they are
         still XUP types attached to the base array type at this point. */
      else if (!DECL_ARTIFICIAL (decl)
               && (TREE_CODE (t) == ARRAY_TYPE
                   || TREE_CODE (t) == POINTER_TYPE
                   || TYPE_IS_FAT_POINTER_P (t)))
        {
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          defer_or_set_type_context (tt,
                                     DECL_CONTEXT (decl),
                                     deferred_decl_context);
          TREE_TYPE (decl) = tt;
          if (TYPE_NAME (t)
              && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
              && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          /* Array types need to have a name so that they can be related to
             their GNAT encodings. */
          if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
            TYPE_NAME (t) = DECL_NAME (decl);
          t = NULL_TREE;
        }
      else if (TYPE_NAME (t)
               && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
               && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
         qualifiers machinery to work properly (see check_qualified_type).
         Also propagate the context to them. Note that it will be propagated
         to all parallel types too thanks to gnat_set_type_context. */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          /* ??? Because of the previous kludge, we can have variants of fat
             pointer types with different names. */
          if (!(TYPE_IS_FAT_POINTER_P (t)
                && TYPE_NAME (t)
                && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            {
              TYPE_NAME (t) = decl;
              defer_or_set_type_context (t,
                                         DECL_CONTEXT (decl),
                                         deferred_decl_context);
            }
    }
}
\f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field. BASE_ALIGN is the alignment the
   record is guaranteed to get. GNAT_NODE is used for the position of the
   associated TYPE_DECL. */

tree
make_aligning_type (tree type, unsigned int align, tree size,
                    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes. We use a
     record placeholder to express record'address. */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                  <--------- pos ---------->
              {  +------------+-------------+-----------------+
    record  =>{  |############|     ...     | field (type)    |
              {  +------------+-------------+-----------------+
                 |<-- room -->|<- voffset ->|<---- size ----->|
                  o            o
                  |            |
                  record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record. */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS. The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address. */
  voffset_st = size_binop (BIT_AND_EXPR,
                           fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
                           size_int ((align / BITS_PER_UNIT) - 1));
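  /* For instance (illustrative values): with a 4-byte ALIGN (32 bits) and a
     vblock address of 9, the two's complement of the address is ...11110111;
     "and"ing it with 3 yields 3, and 9 + 3 == 12 is indeed the next 4-byte
     boundary. */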

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype. */
  pos = size_binop (MULT_EXPR,
                    convert (bitsizetype,
                             size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation. We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield. We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression. */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
                             pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
                  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
                  size_int (room + align / BITS_PER_UNIT));
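  /* Note that the extra ALIGN bits (align / BITS_PER_UNIT bytes) added to
     both sizes are enough to cover the maximum possible value of voffset,
     which is align / BITS_PER_UNIT - 1 bytes. */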

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise. This is
     necessary to ensure that its subtrees are properly marked. */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false. See if we can
   rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero. If so, return the new
   type; if not, return the original type. */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int align = TYPE_ALIGN (type);
  unsigned int new_align;

  /* No point in doing anything if the size is zero. */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places. */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode. Otherwise set it to try for a smaller
     type with BLKmode. */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
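      /* ceil_pow2 gives the smallest power of 2 greater than or equal to
         SIZE, e.g. a 37-bit size becomes 64 bits, which an integral mode
         such as DImode can then accommodate. */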
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      /* Do not try to shrink the size if the RM size is not constant. */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
          || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
        return type;

      /* Round the RM size up to a unit boundary to get the minimal size
         for a BLKmode record. Give up if it's already the size and we
         don't need to lower the alignment. */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
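      /* E.g. with BITS_PER_UNIT == 8, a 13-bit RM size yields 16 bits,
         i.e. 2 storage units. */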
      if (new_size == size && (max_align == 0 || align <= max_align))
        return type;

      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
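      /* new_size & -new_size isolates the lowest set bit of new_size,
         e.g. 48 & -48 == 16: the largest power-of-2 alignment that evenly
         divides the new size. */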
      if (max_align > 0 && new_align > max_align)
        new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards. */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
        new_field_type = make_packable_type (new_field_type, true, max_align);

      /* However, for the last field in a not already packed record type
         that is of an aggregate type, we need to use the RM size in the
         packable version of the record type, see finish_record_type. */
      if (!DECL_CHAIN (field)
          && !TYPE_PACKED (type)
          && RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
          && TYPE_ADA_SIZE (new_field_type))
        new_size = TYPE_ADA_SIZE (new_field_type);
      else
        new_size = DECL_SIZE (field);

      new_field
        = create_field_decl (DECL_NAME (field), new_field_type, new_type,
                             new_size, bit_position (field),
                             TYPE_PACKED (type),
                             !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
        DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
                            DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified. For QUAL_UNION_TYPE, also copy the size. */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field. */
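  /* mode_for_size_tree now returns an opt_machine_mode, so else_blk () is
     used below to extract an integer mode of that exact size when one
     exists and to fall back to BLKmode otherwise. */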
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
                   mode_for_size_tree (TYPE_SIZE (new_type),
                                       MODE_INT, 1).else_blk ());

  /* If neither mode nor size nor alignment shrunk, return the old type. */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  return new_type;
}

/* Return true if TYPE has an unsigned representation. This needs to be used
   when the representation of types whose precision is not equal to their size
   is manipulated based on the RM size. */

static inline bool
type_unsigned_for_rm (tree type)
{
  /* This is the common case. */
  if (TYPE_UNSIGNED (type))
    return true;

  /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
  if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
      && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
    return true;

  return false;
}

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it. Otherwise, make a new type. We
   only support new integral and pointer types. FOR_BIASED is true if
   we are making a biased type. */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error. Likewise if it's too large to represent. */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
                  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden. */
      if (size == 0)
        size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
         already have the proper size and the size isn't too large. */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
          || (TYPE_PRECISION (type) == size && biased_p == for_biased)
          || size > LONG_LONG_TYPE_SIZE)
        break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
         or if the lower bound is constant and non-negative or if the type is
         biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
      if (type_unsigned_for_rm (type) || biased_p)
        new_type = make_unsigned_type (size);
      else
        new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
         not a subrange type. */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
         may need to return the thin pointer. */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
        {
          scalar_int_mode p_mode;
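          /* int_mode_for_size likewise returns an opt_mode: exists (&p_mode)
             is true only if an integer mode of exactly SIZE bits is
             available, setting p_mode to it in that case. */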
          if (!int_mode_for_size (size, 0).exists (&p_mode)
              || !targetm.valid_pointer_mode (p_mode))
            p_mode = ptr_mode;
          return
            build_pointer_type_for_mode
              (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
               p_mode, 0);
        }
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
         may need to return the fat pointer. */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
        return
          build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}

/* Return true iff the padded types are equivalent. */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment, RM size and storage order.
     Taking the mode into account is redundant since it is determined by
     the others. */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table. */

static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  in.hash = hashcode;
  in.type = type;
  h = pad_type_hash_table->find_with_hash (&in, hashcode);
  if (h)
    return h->type;

  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  *pad_type_hash_table->find_slot_with_hash (h, hashcode, INSERT) = h;
  return NULL_TREE;
}

/* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
   if needed. We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array. IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too; in this case,
   the padded type is canonicalized before being returned. */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
                Entity_Id gnat_entity, bool is_component_type,
                bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given. If so, return the original type. Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it. If no size or alignment is specified, use that of
     the original padded type. */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
           || operand_equal_p (round_up (size, orig_align), orig_size, 0))
          && (align == 0 || align == orig_align))
        return type;

      if (!size)
        size = orig_size;
      if (align == 0)
        align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing. Likewise, clear the alignment if it isn't being
     changed. Then return if we aren't doing anything. */
  if (size
      && (operand_equal_p (size, orig_size, 0)
          || (TREE_CODE (orig_size) == INTEGER_CST
              && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name. */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
                      !Comes_From_Source (gnat_entity),
                      !(TYPE_NAME (type)
                        && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
                        && DECL_IGNORED_P (TYPE_NAME (type))),
                      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information. So make a new record
     type and name. */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info). Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information. */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
               size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
                           bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode. This might allow the padding record to also have an
     integral mode, which will be much more efficient. There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole. */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
          || (TREE_CODE (size) == INTEGER_CST
              && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
          && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size. */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
                             bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below. */
  finish_record_type (record, field, 1, false);

  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, type);

  /* Set the RM size if requested. */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
         it by means of the hash table. This is consistent with the language
         semantics and ensures that gigi and the middle-end have a common view
         of these padded types. */
      if (TREE_CONSTANT (TYPE_SIZE (record))
          && (canonical_pad_type = lookup_and_insert_pad_type (record)))
        {
          record = canonical_pad_type;
          goto built;
        }
    }

  /* If debugging information is being written for the input type, write a
     record that shows what we are a subtype of and also make a variable
     that indicates our size, if still variable. */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
           && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
         it will be referenced by debugging information attributes. At global
         level, we need a single variable across all translation units. */
      if (size
          && TREE_CODE (size) != INTEGER_CST
          && (definition || global_bindings_p ()))
        {
          /* Whether or not gnat_entity comes from source, this XVZ variable
             is a compilation artifact. */
          size_unit
            = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
                               size_unit, true, global_bindings_p (),
                               !definition && global_bindings_p (), false,
                               false, true, true, NULL, gnat_entity);
          TYPE_SIZE_UNIT (record) = size_unit;
        }

      /* There is no need to show what we are a subtype of when outputting as
         few encodings as possible: regular debugging information makes this
         redundant. */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
        {
          tree marker = make_node (RECORD_TYPE);
          tree orig_name = TYPE_IDENTIFIER (type);

          TYPE_NAME (marker) = concat_name (name, "XVS");
          finish_record_type (marker,
                              create_field_decl (orig_name,
                                                 build_reference_type (type),
                                                 marker, NULL_TREE, NULL_TREE,
                                                 0, 0),
                              0, true);
          TYPE_SIZE_UNIT (marker) = size_unit;

          add_parallel_type (record, marker);
        }
    }

 built:
  /* If a simple size was explicitly given, maybe issue a warning. */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity))
    return record;

  /* But don't do it if we are just annotating types and the type is tagged or
     concurrent, since these types aren't fully laid out in this mode. */
  if (type_annotate_only)
    {
      Entity_Id gnat_type
        = is_component_type
          ? Component_Type (gnat_entity) : Etype (gnat_entity);

      if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
        return record;
    }

  /* Take the original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified, but only for aggregate types. */
  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
           && TREE_CODE (orig_size) == INTEGER_CST
           && (TREE_OVERFLOW (size)
               || TREE_OVERFLOW (orig_size)
               || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type. */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
        gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
           || Ekind (gnat_entity) == E_Discriminant)
          && Present (Component_Clause (gnat_entity)))
        gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
        gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
         if we have an entity created by expansion, the message will be
         generated for some other corresponding source entity. */
      if (Comes_From_Source (gnat_entity))
        {
          if (Present (gnat_error_node))
            post_error_ne_tree ("{^ }bits of & unused?",
                                gnat_error_node, gnat_entity,
                                size_diffop (size, orig_size));
          else if (is_component_type)
            post_error_ne_tree ("component of& padded{ by ^ bits}?",
                                gnat_entity, gnat_entity,
                                size_diffop (size, orig_size));
        }
    }

  return record;
}

/* Return a copy of the padded TYPE but with reverse storage order. */

tree
set_reverse_storage_order_on_pad_type (tree type)
{
  tree field, canonical_pad_type;

  if (flag_checking)
    {
      /* If the inner type is not scalar then the function does nothing. */
      tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
      gcc_assert (!AGGREGATE_TYPE_P (inner_type)
                  && !VECTOR_TYPE_P (inner_type));
    }

  /* This is required for the canonicalization. */
  gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));

  field = copy_node (TYPE_FIELDS (type));
  type = copy_type (type);
  DECL_CONTEXT (field) = type;
  TYPE_FIELDS (type) = field;
  TYPE_REVERSE_STORAGE_ORDER (type) = 1;
  canonical_pad_type = lookup_and_insert_pad_type (type);
  return canonical_pad_type ? canonical_pad_type : type;
}
\f
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one. */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types. */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
         && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
             || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0. Retrieve the underlying array type. */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
         aliasing settings because this can break the aliasing relationship
         between the array type and its element type. */
      if (flag_checking || flag_strict_aliasing)
        gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
                      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
                      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
                         != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
        alias_set_type old_set = get_alias_set (gnu_old_type);
        alias_set_type new_set = get_alias_set (gnu_new_type);

        /* Do nothing if the alias sets conflict. This ensures that we
           never call record_alias_subset several times for the same pair
           or at all for alias set 0. */
        if (!alias_sets_conflict_p (old_set, new_set))
          {
            if (op == ALIAS_SET_SUBSET)
1608 record_alias_subset (old_set, new_set);
1609 else
1610 record_alias_subset (new_set, old_set);
1611 }
1612 }
1613 break;
1614
1615 default:
1616 gcc_unreachable ();
1617 }
1618
1619 record_component_aliases (gnu_new_type);
1620 }
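/* Illustrative use (an assumption for exposition, not from the sources):
   when a new view of an array type is built that accesses exactly the
   same objects as the old one, ALIAS_SET_COPY is the natural choice;
   ALIAS_SET_SUBSET or ALIAS_SET_SUPERSET fit when the new view may
   access strictly less, resp. more, than the old one.  */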
1621 \f
1622 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1623 ARTIFICIAL_P is true if the type was generated by the compiler. */
1624
1625 void
1626 record_builtin_type (const char *name, tree type, bool artificial_p)
1627 {
1628 tree type_decl = build_decl (input_location,
1629 TYPE_DECL, get_identifier (name), type);
1630 DECL_ARTIFICIAL (type_decl) = artificial_p;
1631 TYPE_ARTIFICIAL (type) = artificial_p;
1632 gnat_pushdecl (type_decl, Empty);
1633
1634 if (debug_hooks->type_decl)
1635 debug_hooks->type_decl (type_decl, false);
1636 }
1637 \f
1638 /* Finish constructing the character type CHAR_TYPE.
1639
1640 In Ada, character types are enumeration types and, as a consequence, are
1641 represented in the front-end by integral types holding the positions of
1642 the enumeration values as defined by the language, which means that the
1643 integral types are unsigned.
1644
1645 Unfortunately the signedness of 'char' in C is implementation-defined
1646 and GCC even provides the option -fsigned-char to override the default.
1647 Since GNAT's philosophy is to be compatible with C by default, to wit
1648 Interfaces.C.char is defined as a mere copy of Character, we may need
1649 to declare character types as signed types in GENERIC and generate the
1650 necessary adjustments to make them behave as unsigned types.
1651
1652 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1653 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1654 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1655 types. The idea is to ensure that the bit pattern contained in the
1656 Esize'd objects is not changed, even though the numerical value will
1657 be interpreted differently depending on the signedness. */
1658
1659 void
1660 finish_character_type (tree char_type)
1661 {
1662 if (TYPE_UNSIGNED (char_type))
1663 return;
1664
1665 /* Make a copy of a generic unsigned version since we'll modify it. */
1666 tree unsigned_char_type
1667 = (char_type == char_type_node
1668 ? unsigned_char_type_node
1669 : copy_type (gnat_unsigned_type_for (char_type)));
1670
1671 /* Create an unsigned version of the type and set it as debug type. */
1672 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1673 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1674 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1675 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1676
1677 /* If this is a subtype, make the debug type a subtype of the debug type
1678 of the base type and convert literal RM bounds to unsigned. */
1679 if (TREE_TYPE (char_type))
1680 {
1681 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1682 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1683 tree max_value = TYPE_RM_MAX_VALUE (char_type);
1684
1685 if (TREE_CODE (min_value) == INTEGER_CST)
1686 min_value = fold_convert (base_unsigned_char_type, min_value);
1687 if (TREE_CODE (max_value) == INTEGER_CST)
1688 max_value = fold_convert (base_unsigned_char_type, max_value);
1689
1690 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1691 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1692 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1693 }
1694
1695 /* Adjust the RM bounds of the original type to unsigned; that's especially
1696 important for base types, whose bounds are implicit in this case. */
1697 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1698 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1699 }
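/* Worked example for the above (illustration only): if 'char' is signed,
   a Character object holding the position 200 keeps the bit pattern 0xC8
   but reads as the signed value 200 - 256 = -56 in GENERIC; the unsigned
   debug type and the unsigned RM bounds set above make tools interpret
   that same bit pattern as 200 again.  */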
1700
1701 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1702 finish constructing the record type as a fat pointer type. */
1703
1704 void
1705 finish_fat_pointer_type (tree record_type, tree field_list)
1706 {
1707 /* Make sure we can put it into a register. */
1708 if (STRICT_ALIGNMENT)
1709 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1710
1711 /* Show what it really is. */
1712 TYPE_FAT_POINTER_P (record_type) = 1;
1713
1714 /* Do not emit debug info for it since the types of its fields may still be
1715 incomplete at this point. */
1716 finish_record_type (record_type, field_list, 0, false);
1717
1718 /* Force type_contains_placeholder_p to return true on it. Although the
1719 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1720 type but the representation of the unconstrained array. */
1721 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1722 }
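/* For reference, the field list of a fat pointer follows the encoding
   documented in exp_dbug.ads: a P_ARRAY pointer to the array data and a
   P_BOUNDS pointer to the template holding the bounds, which is why the
   alignment computation above assumes two pointer-sized fields.  */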
1723
1724 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1725 finish constructing the record or union type. If REP_LEVEL is zero, this
1726 record has no representation clause and so will be entirely laid out here.
1727 If REP_LEVEL is one, this record has a representation clause and has been
1728 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1729 this record is derived from a parent record and thus inherits its layout;
1730 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1731 additional debug info needs to be output for this type. */
1732
1733 void
1734 finish_record_type (tree record_type, tree field_list, int rep_level,
1735 bool debug_info_p)
1736 {
1737 enum tree_code code = TREE_CODE (record_type);
1738 tree name = TYPE_IDENTIFIER (record_type);
1739 tree ada_size = bitsize_zero_node;
1740 tree size = bitsize_zero_node;
1741 bool had_size = TYPE_SIZE (record_type) != 0;
1742 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1743 bool had_align = TYPE_ALIGN (record_type) != 0;
1744 tree field;
1745
1746 TYPE_FIELDS (record_type) = field_list;
1747
1748 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1749 generate debug info and have a parallel type. */
1750 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1751
1752 /* Globally initialize the record first. If this is a rep'ed record,
1753 that just means some initializations; otherwise, layout the record. */
1754 if (rep_level > 0)
1755 {
1756 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1757 TYPE_ALIGN (record_type)));
1758
1759 if (!had_size_unit)
1760 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1761
1762 if (!had_size)
1763 TYPE_SIZE (record_type) = bitsize_zero_node;
1764
1765 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1766 out just like a UNION_TYPE, since the size will be fixed. */
1767 else if (code == QUAL_UNION_TYPE)
1768 code = UNION_TYPE;
1769 }
1770 else
1771 {
1772 /* Ensure there isn't a size already set. There can be one in an error
1773 case where there is a rep clause but all fields have errors and
1774 no longer have a position. */
1775 TYPE_SIZE (record_type) = 0;
1776
1777 /* Ensure we use the traditional GCC layout for bitfields when we need
1778 to pack the record type or have a representation clause. The other
1779 possible layout (Microsoft C compiler), if available, would prevent
1780 efficient packing in almost all cases. */
1781 #ifdef TARGET_MS_BITFIELD_LAYOUT
1782 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1783 decl_attributes (&record_type,
1784 tree_cons (get_identifier ("gcc_struct"),
1785 NULL_TREE, NULL_TREE),
1786 ATTR_FLAG_TYPE_IN_PLACE);
1787 #endif
1788
1789 layout_type (record_type);
1790 }
1791
1792 /* At this point, the position and size of each field is known. It was
1793 either set before entry by a rep clause, or by laying out the type above.
1794
1795 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1796 to compute the Ada size; the GCC size and alignment (for rep'ed records
1797 that are not padding types); and the mode (for rep'ed records). We also
1798 clear the DECL_BIT_FIELD indication for the cases we know have not been
1799 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1800
1801 if (code == QUAL_UNION_TYPE)
1802 field_list = nreverse (field_list);
1803
1804 for (field = field_list; field; field = DECL_CHAIN (field))
1805 {
1806 tree type = TREE_TYPE (field);
1807 tree pos = bit_position (field);
1808 tree this_size = DECL_SIZE (field);
1809 tree this_ada_size;
1810
1811 if (RECORD_OR_UNION_TYPE_P (type)
1812 && !TYPE_FAT_POINTER_P (type)
1813 && !TYPE_CONTAINS_TEMPLATE_P (type)
1814 && TYPE_ADA_SIZE (type))
1815 this_ada_size = TYPE_ADA_SIZE (type);
1816 else
1817 this_ada_size = this_size;
1818
1819 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1820 if (DECL_BIT_FIELD (field)
1821 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1822 {
1823 unsigned int align = TYPE_ALIGN (type);
1824
1825 /* In the general case, type alignment is required. */
1826 if (value_factor_p (pos, align))
1827 {
1828 /* The enclosing record type must be sufficiently aligned.
1829 Otherwise, if no alignment was specified for it and it
1830 has been laid out already, bump its alignment to the
1831 desired one if this is compatible with its size and
1832 maximum alignment, if any. */
1833 if (TYPE_ALIGN (record_type) >= align)
1834 {
1835 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1836 DECL_BIT_FIELD (field) = 0;
1837 }
1838 else if (!had_align
1839 && rep_level == 0
1840 && value_factor_p (TYPE_SIZE (record_type), align)
1841 && (!TYPE_MAX_ALIGN (record_type)
1842 || TYPE_MAX_ALIGN (record_type) >= align))
1843 {
1844 SET_TYPE_ALIGN (record_type, align);
1845 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1846 DECL_BIT_FIELD (field) = 0;
1847 }
1848 }
1849
1850 /* In the non-strict alignment case, only byte alignment is required. */
1851 if (!STRICT_ALIGNMENT
1852 && DECL_BIT_FIELD (field)
1853 && value_factor_p (pos, BITS_PER_UNIT))
1854 DECL_BIT_FIELD (field) = 0;
1855 }
1856
1857 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1858 field is technically not addressable. Except that it can actually
1859 be addressed if it is BLKmode and happens to be properly aligned. */
1860 if (DECL_BIT_FIELD (field)
1861 && !(DECL_MODE (field) == BLKmode
1862 && value_factor_p (pos, BITS_PER_UNIT)))
1863 DECL_NONADDRESSABLE_P (field) = 1;
1864
1865 /* A type must be as aligned as its most aligned field that is not
1866 a bit-field. But this is already enforced by layout_type. */
1867 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1868 SET_TYPE_ALIGN (record_type,
1869 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1870
1871 switch (code)
1872 {
1873 case UNION_TYPE:
1874 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1875 size = size_binop (MAX_EXPR, size, this_size);
1876 break;
1877
1878 case QUAL_UNION_TYPE:
1879 ada_size
1880 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1881 this_ada_size, ada_size);
1882 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1883 this_size, size);
1884 break;
1885
1886 case RECORD_TYPE:
1887 /* Since we know here that all fields are sorted in order of
1888 increasing bit position, the size of the record is one
1889 higher than the ending bit of the last field processed
1890 unless we have a rep clause, since in that case we might
1891 have a field outside a QUAL_UNION_TYPE that has a higher ending
1892 position. So use a MAX in that case. Also, if this field is a
1893 QUAL_UNION_TYPE, we need to take into account the previous size in
1894 the case of empty variants. */
1895 ada_size
1896 = merge_sizes (ada_size, pos, this_ada_size,
1897 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1898 size
1899 = merge_sizes (size, pos, this_size,
1900 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1901 break;
1902
1903 default:
1904 gcc_unreachable ();
1905 }
1906 }
1907
1908 if (code == QUAL_UNION_TYPE)
1909 nreverse (field_list);
1910
1911 if (rep_level < 2)
1912 {
1913 /* If this is a padding record, we never want to make the size smaller
1914 than what was specified in it, if any. */
1915 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1916 size = TYPE_SIZE (record_type);
1917
1918 /* Now set any of the values we've just computed that apply. */
1919 if (!TYPE_FAT_POINTER_P (record_type)
1920 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1921 SET_TYPE_ADA_SIZE (record_type, ada_size);
1922
1923 if (rep_level > 0)
1924 {
1925 tree size_unit = had_size_unit
1926 ? TYPE_SIZE_UNIT (record_type)
1927 : convert (sizetype,
1928 size_binop (CEIL_DIV_EXPR, size,
1929 bitsize_unit_node));
1930 unsigned int align = TYPE_ALIGN (record_type);
1931
1932 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1933 TYPE_SIZE_UNIT (record_type)
1934 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1935
1936 compute_record_mode (record_type);
1937 }
1938 }
1939
1940 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1941 TYPE_MAX_ALIGN (record_type) = 0;
1942
1943 if (debug_info_p)
1944 rest_of_record_type_compilation (record_type);
1945 }
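/* Recap of the REP_LEVEL protocol by example (illustration only): a plain
   record declaration comes in with rep_level 0 and is fully laid out by
   layout_type above; a record with a record representation clause comes in
   already positioned and uses rep_level 1, so only sizes, alignment and
   mode are set here; a type inheriting its parent's layout uses
   rep_level 2, where the loop merely finalizes the fields.  */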
1946
1947 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1948 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1949 propagate TYPE's context to PARALLEL_TYPE, or defer the propagation to the
1950 moment TYPE gets a context. */
1951
1952 void
1953 add_parallel_type (tree type, tree parallel_type)
1954 {
1955 tree decl = TYPE_STUB_DECL (type);
1956
1957 while (DECL_PARALLEL_TYPE (decl))
1958 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1959
1960 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1961
1962 /* If PARALLEL_TYPE already has a context, we are done. */
1963 if (TYPE_CONTEXT (parallel_type))
1964 return;
1965
1966 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
1967 it to PARALLEL_TYPE. */
1968 if (TYPE_CONTEXT (type))
1969 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1970
1971 /* Otherwise TYPE has no context yet. We know it will have one thanks to
1972 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
1973 so we have nothing to do in this case. */
1974 }
1975
1976 /* Return true if TYPE has a parallel type. */
1977
1978 static bool
1979 has_parallel_type (tree type)
1980 {
1981 tree decl = TYPE_STUB_DECL (type);
1982
1983 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1984 }
1985
1986 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
1987 associated with it. It need not be invoked directly in most cases as
1988 finish_record_type takes care of doing so. */
1989
1990 void
1991 rest_of_record_type_compilation (tree record_type)
1992 {
1993 bool var_size = false;
1994 tree field;
1995
1996 /* If this is a padded type, the bulk of the debug info has already been
1997 generated for the field's type. */
1998 if (TYPE_IS_PADDING_P (record_type))
1999 return;
2000
2001 /* If the type already has a parallel type (XVS type), then we're done. */
2002 if (has_parallel_type (record_type))
2003 return;
2004
2005 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2006 {
2007 /* We need to make an XVE/XVU record if any field has variable size,
2008 whether or not the record does. For example, if we have a union,
2009 it may be that all fields, rounded up to the alignment, have the
2010 same size, in which case we'll use that size. But the debug
2011 output routines (except Dwarf2) won't be able to output the fields,
2012 so we need to make the special record. */
2013 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2014 /* If a field has a non-constant qualifier, the record will have
2015 variable size too. */
2016 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2017 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2018 {
2019 var_size = true;
2020 break;
2021 }
2022 }
2023
2024 /* If this record type is of variable size, make a parallel record type that
2025 will tell the debugger how the former is laid out (see exp_dbug.ads). */
2026 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2027 {
2028 tree new_record_type
2029 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2030 ? UNION_TYPE : TREE_CODE (record_type));
2031 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2032 tree last_pos = bitsize_zero_node;
2033 tree old_field, prev_old_field = NULL_TREE;
2034
2035 new_name
2036 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2037 ? "XVU" : "XVE");
2038 TYPE_NAME (new_record_type) = new_name;
2039 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2040 TYPE_STUB_DECL (new_record_type)
2041 = create_type_stub_decl (new_name, new_record_type);
2042 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2043 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2044 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2045 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2046 TYPE_SIZE_UNIT (new_record_type)
2047 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2048
2049 /* Now scan all the fields, replacing each field with a new field
2050 corresponding to the new encoding. */
2051 for (old_field = TYPE_FIELDS (record_type); old_field;
2052 old_field = DECL_CHAIN (old_field))
2053 {
2054 tree field_type = TREE_TYPE (old_field);
2055 tree field_name = DECL_NAME (old_field);
2056 tree curpos = fold_bit_position (old_field);
2057 tree pos, new_field;
2058 bool var = false;
2059 unsigned int align = 0;
2060
2061 /* See how the position was modified from the last position.
2062
2063 There are two basic cases we support: a value was added
2064 to the last position or the last position was rounded to
2065 a boundary and then something was added. Check for the
2066 first case first. If not, see if there is any evidence
2067 of rounding. If so, round the last position and retry.
2068
2069 If this is a union, the position can be taken as zero. */
2070 if (TREE_CODE (new_record_type) == UNION_TYPE)
2071 pos = bitsize_zero_node;
2072 else
2073 pos = compute_related_constant (curpos, last_pos);
2074
2075 if (!pos
2076 && TREE_CODE (curpos) == MULT_EXPR
2077 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2078 {
2079 tree offset = TREE_OPERAND (curpos, 0);
2080 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2081 align = scale_by_factor_of (offset, align);
2082 last_pos = round_up (last_pos, align);
2083 pos = compute_related_constant (curpos, last_pos);
2084 }
2085 else if (!pos
2086 && TREE_CODE (curpos) == PLUS_EXPR
2087 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2088 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2089 && tree_fits_uhwi_p
2090 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2091 {
2092 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2093 unsigned HOST_WIDE_INT addend
2094 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2095 align
2096 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2097 align = scale_by_factor_of (offset, align);
2098 align = MIN (align, addend & -addend);
2099 last_pos = round_up (last_pos, align);
2100 pos = compute_related_constant (curpos, last_pos);
2101 }
2102 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2103 {
2104 align = TYPE_ALIGN (field_type);
2105 last_pos = round_up (last_pos, align);
2106 pos = compute_related_constant (curpos, last_pos);
2107 }
2108
2109 /* If we can't compute a position, set it to zero.
2110
2111 ??? We really should abort here, but it's too much work
2112 to get this correct for all cases. */
2113 if (!pos)
2114 pos = bitsize_zero_node;
2115
2116 /* See if this type is variable-sized; if so, make a pointer type
2117 and indicate the indirection. Beware that the debug
2118 back-end may adjust the position computed above according
2119 to the alignment of the field type, i.e. the pointer type
2120 in this case, if we don't preventively counter that. */
2121 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2122 {
2123 field_type = build_pointer_type (field_type);
2124 if (align != 0 && TYPE_ALIGN (field_type) > align)
2125 {
2126 field_type = copy_type (field_type);
2127 SET_TYPE_ALIGN (field_type, align);
2128 }
2129 var = true;
2130 }
2131
2132 /* Make a new field name, if necessary. */
2133 if (var || align != 0)
2134 {
2135 char suffix[16];
2136
2137 if (align != 0)
2138 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2139 align / BITS_PER_UNIT);
2140 else
2141 strcpy (suffix, "XVL");
2142
2143 field_name = concat_name (field_name, suffix);
2144 }
2145
2146 new_field
2147 = create_field_decl (field_name, field_type, new_record_type,
2148 DECL_SIZE (old_field), pos, 0, 0);
2149 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2150 TYPE_FIELDS (new_record_type) = new_field;
2151
2152 /* If old_field is a QUAL_UNION_TYPE, take its size as being
2153 zero. The only time it's not the last field of the record
2154 is when there are other components at fixed positions after
2155 it (meaning there was a rep clause for every field) and we
2156 want to be able to encode them. */
2157 last_pos = size_binop (PLUS_EXPR, curpos,
2158 (TREE_CODE (TREE_TYPE (old_field))
2159 == QUAL_UNION_TYPE)
2160 ? bitsize_zero_node
2161 : DECL_SIZE (old_field));
2162 prev_old_field = old_field;
2163 }
2164
2165 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2166
2167 add_parallel_type (record_type, new_record_type);
2168 }
2169 }
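/* Illustrative sketch (not from the original sources): for a discriminated
   record such as

     type R (N : Natural) is record
       S : String (1 .. N);
     end record;

   the field S has variable size, so a parallel type with the XVE suffix
   is built in which the counterpart of S gets the XVL suffix and a
   pointer type, telling the debugger to read the actual data through an
   indirection (see exp_dbug.ads for the details of the encoding).  */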
2170
2171 /* Utility function of above to merge LAST_SIZE, the previous size of a record,
2172 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2173 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2174 replace a value of zero with the old size. If HAS_REP is true, we take the
2175 MAX of the end position of this field with LAST_SIZE. In all other cases,
2176 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2177
2178 static tree
2179 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2180 bool has_rep)
2181 {
2182 tree type = TREE_TYPE (last_size);
2183 tree new_size;
2184
2185 if (!special || TREE_CODE (size) != COND_EXPR)
2186 {
2187 new_size = size_binop (PLUS_EXPR, first_bit, size);
2188 if (has_rep)
2189 new_size = size_binop (MAX_EXPR, last_size, new_size);
2190 }
2191
2192 else
2193 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2194 integer_zerop (TREE_OPERAND (size, 1))
2195 ? last_size : merge_sizes (last_size, first_bit,
2196 TREE_OPERAND (size, 1),
2197 1, has_rep),
2198 integer_zerop (TREE_OPERAND (size, 2))
2199 ? last_size : merge_sizes (last_size, first_bit,
2200 TREE_OPERAND (size, 2),
2201 1, has_rep));
2202
2203 /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
2204 when fed through substitute_in_expr) into thinking that a constant
2205 size is not constant. */
2206 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2207 new_size = TREE_OPERAND (new_size, 0);
2208
2209 return new_size;
2210 }
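/* Worked example: for a field at FIRST_BIT 64 with SIZE 32 and a
   LAST_SIZE of 64, the result is 64 + 32 = 96 bits without rep clause;
   with HAS_REP, a field placed before the current end, say FIRST_BIT 0
   and SIZE 32, leaves MAX (96, 0 + 32) = 96 bits.  */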
2211
2212 /* Return the bit position of FIELD, in bits from the start of the record,
2213 and fold it as much as possible. This is a tree of type bitsizetype. */
2214
2215 static tree
2216 fold_bit_position (const_tree field)
2217 {
2218 tree offset = DECL_FIELD_OFFSET (field);
2219 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2220 offset = size_binop (TREE_CODE (offset),
2221 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2222 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2223 else
2224 offset = fold_convert (bitsizetype, offset);
2225 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2226 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2227 }
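/* Example: with 8-bit storage units, a field at DECL_FIELD_OFFSET 4 and
   DECL_FIELD_BIT_OFFSET 2 sits at 2 + 4 * 8 = 34 bits from the start of
   the record.  */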
2228
2229 /* Utility function of above to see if OP0 and OP1, both of bitsizetype, are
2230 related by the addition of a constant. Return that constant if so. */
2231
2232 static tree
2233 compute_related_constant (tree op0, tree op1)
2234 {
2235 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2236
2237 if (TREE_CODE (op0) == MULT_EXPR
2238 && TREE_CODE (op1) == MULT_EXPR
2239 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2240 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2241 {
2242 factor = TREE_OPERAND (op0, 1);
2243 op0 = TREE_OPERAND (op0, 0);
2244 op1 = TREE_OPERAND (op1, 0);
2245 }
2246 else
2247 factor = NULL_TREE;
2248
2249 op0_cst = split_plus (op0, &op0_var);
2250 op1_cst = split_plus (op1, &op1_var);
2251 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2252
2253 if (operand_equal_p (op0_var, op1_var, 0))
2254 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2255
2256 return NULL_TREE;
2257 }
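/* Example: with OP0 = N + 48 and OP1 = N + 16, the variable parts match
   and 32 is returned; with OP0 = (N + 6) * 8 and OP1 = (N + 2) * 8, the
   common factor 8 is stripped first and (6 - 2) * 8 = 32 is returned.  */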
2258
2259 /* Utility function of above to split a tree OP which may be a sum, into a
2260 constant part, which is returned, and a variable part, which is stored
2261 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2262 bitsizetype. */
2263
2264 static tree
2265 split_plus (tree in, tree *pvar)
2266 {
2267 /* Strip conversions in order to ease the tree traversal and maximize the
2268 potential for constant or plus/minus discovery. We need to be careful
2269 to always return and set *pvar to bitsizetype trees, but it's worth
2270 the effort. */
2271 in = remove_conversions (in, false);
2272
2273 *pvar = convert (bitsizetype, in);
2274
2275 if (TREE_CODE (in) == INTEGER_CST)
2276 {
2277 *pvar = bitsize_zero_node;
2278 return convert (bitsizetype, in);
2279 }
2280 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2281 {
2282 tree lhs_var, rhs_var;
2283 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2284 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2285
2286 if (lhs_var == TREE_OPERAND (in, 0)
2287 && rhs_var == TREE_OPERAND (in, 1))
2288 return bitsize_zero_node;
2289
2290 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2291 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2292 }
2293 else
2294 return bitsize_zero_node;
2295 }
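/* Example: for IN = (X + 12) - Y, the function returns the constant 12
   and sets *PVAR to X - Y; for IN = X alone, it returns bitsize_zero_node
   and leaves *PVAR = X.  */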
2296 \f
2297 /* Return a copy of TYPE but safe to modify in any way. */
2298
2299 tree
2300 copy_type (tree type)
2301 {
2302 tree new_type = copy_node (type);
2303
2304 /* Unshare the language-specific data. */
2305 if (TYPE_LANG_SPECIFIC (type))
2306 {
2307 TYPE_LANG_SPECIFIC (new_type) = NULL;
2308 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2309 }
2310
2311 /* And the contents of the language-specific slot if needed. */
2312 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2313 && TYPE_RM_VALUES (type))
2314 {
2315 TYPE_RM_VALUES (new_type) = NULL_TREE;
2316 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2317 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2318 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2319 }
2320
2321 /* copy_node clears this field instead of copying it, because it is
2322 aliased with TREE_CHAIN. */
2323 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2324
2325 TYPE_POINTER_TO (new_type) = NULL_TREE;
2326 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2327 TYPE_MAIN_VARIANT (new_type) = new_type;
2328 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2329 TYPE_CANONICAL (new_type) = new_type;
2330
2331 return new_type;
2332 }
2333 \f
2334 /* Return a subtype of sizetype with range MIN to MAX and whose
2335 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2336 of the associated TYPE_DECL. */
2337
2338 tree
2339 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2340 {
2341 /* First build a type for the desired range. */
2342 tree type = build_nonshared_range_type (sizetype, min, max);
2343
2344 /* Then set the index type. */
2345 SET_TYPE_INDEX_TYPE (type, index);
2346 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2347
2348 return type;
2349 }
2350
2351 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2352 sizetype is used. */
2353
2354 tree
2355 create_range_type (tree type, tree min, tree max)
2356 {
2357 tree range_type;
2358
2359 if (!type)
2360 type = sizetype;
2361
2362 /* First build a type with the base range. */
2363 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2364 TYPE_MAX_VALUE (type));
2365
2366 /* Then set the actual range. */
2367 SET_TYPE_RM_MIN_VALUE (range_type, min);
2368 SET_TYPE_RM_MAX_VALUE (range_type, max);
2369
2370 return range_type;
2371 }
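/* For instance (illustration only), create_range_type (NULL_TREE,
   size_int (1), size_int (10)) yields a sizetype-based subtype whose base
   range is that of sizetype but whose RM bounds, the ones that matter for
   Ada semantics and debug info, are 1 .. 10.  */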
2372 \f
2373 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2374 NAME gives the name of the type to be used in the declaration. */
2375
2376 tree
2377 create_type_stub_decl (tree name, tree type)
2378 {
2379 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2380 DECL_ARTIFICIAL (type_decl) = 1;
2381 TYPE_ARTIFICIAL (type) = 1;
2382 return type_decl;
2383 }
2384
2385 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2386 used in the declaration. ARTIFICIAL_P is true if the declaration was
2387 generated by the compiler. DEBUG_INFO_P is true if we need to write
2388 debug information about this type. GNAT_NODE is used for the position
2389 of the decl. */
2390
2391 tree
2392 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2393 Node_Id gnat_node)
2394 {
2395 enum tree_code code = TREE_CODE (type);
2396 bool is_named
2397 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2398 tree type_decl;
2399
2400 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2401 gcc_assert (!TYPE_IS_DUMMY_P (type));
2402
2403 /* If the type hasn't been named yet, we're naming it; preserve an existing
2404 TYPE_STUB_DECL that has been attached to it for some purpose. */
2405 if (!is_named && TYPE_STUB_DECL (type))
2406 {
2407 type_decl = TYPE_STUB_DECL (type);
2408 DECL_NAME (type_decl) = name;
2409 }
2410 else
2411 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2412
2413 DECL_ARTIFICIAL (type_decl) = artificial_p;
2414 TYPE_ARTIFICIAL (type) = artificial_p;
2415
2416 /* Add this decl to the current binding level. */
2417 gnat_pushdecl (type_decl, gnat_node);
2418
2419 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2420 causes the name to be also viewed as a "tag" by the debug back-end, with
2421 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2422 types in DWARF.
2423
2424 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2425 from multiple contexts, and "type_decl" references a copy of it: in such a
2426 case, do not mess with TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2427 with the mechanism above. */
2428 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2429 TYPE_STUB_DECL (type) = type_decl;
2430
2431 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2432 back-end doesn't support, and for others if we don't need to. */
2433 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2434 DECL_IGNORED_P (type_decl) = 1;
2435
2436 return type_decl;
2437 }
2438 \f
2439 /* Return a VAR_DECL or CONST_DECL node.
2440
2441 NAME gives the name of the variable. ASM_NAME is its assembler name
2442 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2443 the GCC tree for an optional initial expression; NULL_TREE if none.
2444
2445 CONST_FLAG is true if this variable is constant, in which case we might
2446 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2447
2448 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2449 definition to be made visible outside of the current compilation unit, for
2450 instance variable definitions in a package specification.
2451
2452 EXTERN_FLAG is true when processing an external variable declaration (as
2453 opposed to a definition: no storage is to be allocated for the variable).
2454
2455 STATIC_FLAG is only relevant when not at top level and indicates whether
2456 to always allocate storage to the variable.
2457
2458 VOLATILE_FLAG is true if this variable is declared as volatile.
2459
2460 ARTIFICIAL_P is true if the variable was generated by the compiler.
2461
2462 DEBUG_INFO_P is true if we need to write debug information for it.
2463
2464 ATTR_LIST is the list of attributes to be attached to the variable.
2465
2466 GNAT_NODE is used for the position of the decl. */
2467
2468 tree
2469 create_var_decl (tree name, tree asm_name, tree type, tree init,
2470 bool const_flag, bool public_flag, bool extern_flag,
2471 bool static_flag, bool volatile_flag, bool artificial_p,
2472 bool debug_info_p, struct attrib *attr_list,
2473 Node_Id gnat_node, bool const_decl_allowed_p)
2474 {
2475 /* Whether the object has static storage duration, either explicitly or by
2476 virtue of being declared at the global level. */
2477 const bool static_storage = static_flag || global_bindings_p ();
2478
2479 /* Whether the initializer is constant: for an external object or an object
2480 with static storage duration, we check that the initializer is a valid
2481 constant expression for initializing a static variable; otherwise, we
2482 only check that it is constant. */
2483 const bool init_const
2484 = (init
2485 && gnat_types_compatible_p (type, TREE_TYPE (init))
2486 && (extern_flag || static_storage
2487 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2488 != NULL_TREE
2489 : TREE_CONSTANT (init)));
2490
2491 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2492 case the initializer may be used in lieu of the DECL node (as done in
2493 Identifier_to_gnu). This is useful to prevent the need of elaboration
2494 code when an identifier for which such a DECL is made is in turn used
2495 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2496 but extra constraints apply to this choice (see below) and they are not
2497 relevant to the distinction we wish to make. */
2498 const bool constant_p = const_flag && init_const;
2499
2500 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2501 and may be used for scalars in general but not for aggregates. */
2502 tree var_decl
2503 = build_decl (input_location,
2504 (constant_p
2505 && const_decl_allowed_p
2506 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2507 name, type);
2508
2509 /* Detect constants created by the front-end to hold 'reference to function
2510 calls for stabilization purposes. This is needed for renaming. */
2511 if (const_flag && init && POINTER_TYPE_P (type))
2512 {
2513 tree inner = init;
2514 if (TREE_CODE (inner) == COMPOUND_EXPR)
2515 inner = TREE_OPERAND (inner, 1);
2516 inner = remove_conversions (inner, true);
2517 if (TREE_CODE (inner) == ADDR_EXPR
2518 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2519 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2520 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2521 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2522 DECL_RETURN_VALUE_P (var_decl) = 1;
2523 }
2524
2525 /* If this is external, throw away any initializations (they will be done
2526 elsewhere) unless this is a constant for which we would like to remain
2527 able to get the initializer. If we are defining a global here, leave a
2528 constant initialization and save any variable elaborations for the
2529 elaboration routine. If we are just annotating types, throw away the
2530 initialization if it isn't a constant. */
2531 if ((extern_flag && !constant_p)
2532 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2533 init = NULL_TREE;
2534
2535 /* At the global level, a non-constant initializer generates elaboration
2536 statements. Check that such statements are allowed, that is to say,
2537 not violating a No_Elaboration_Code restriction. */
2538 if (init && !init_const && global_bindings_p ())
2539 Check_Elaboration_Code_Allowed (gnat_node);
2540
2541 /* Attach the initializer, if any. */
2542 DECL_INITIAL (var_decl) = init;
2543
2544 /* Directly set some flags. */
2545 DECL_ARTIFICIAL (var_decl) = artificial_p;
2546 DECL_EXTERNAL (var_decl) = extern_flag;
2547
2548 TREE_CONSTANT (var_decl) = constant_p;
2549 TREE_READONLY (var_decl) = const_flag;
2550
2551 /* The object is public if it is external or if it is declared public
2552 and has static storage duration. */
2553 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2554
2555 /* We need to allocate static storage for an object with static storage
2556 duration if it isn't external. */
2557 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2558
2559 TREE_SIDE_EFFECTS (var_decl)
2560 = TREE_THIS_VOLATILE (var_decl)
2561 = TYPE_VOLATILE (type) | volatile_flag;
2562
2563 if (TREE_SIDE_EFFECTS (var_decl))
2564 TREE_ADDRESSABLE (var_decl) = 1;
2565
2566 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2567 try to fiddle with DECL_COMMON. However, on platforms that don't
2568 support global BSS sections, uninitialized global variables would
2569 go in DATA instead, thus increasing the size of the executable. */
2570 if (!flag_no_common
2571 && TREE_CODE (var_decl) == VAR_DECL
2572 && TREE_PUBLIC (var_decl)
2573 && !have_global_bss_p ())
2574 DECL_COMMON (var_decl) = 1;
2575
2576 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2577 since we will create an associated variable. Likewise for an external
2578 constant whose initializer is not absolute, because this would mean a
2579 global relocation in a read-only section which runs afoul of the PE-COFF
2580 run-time relocation mechanism. */
2581 if (!debug_info_p
2582 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2583 || (extern_flag
2584 && constant_p
2585 && init
2586 && initializer_constant_valid_p (init, TREE_TYPE (init))
2587 != null_pointer_node))
2588 DECL_IGNORED_P (var_decl) = 1;
2589
2590 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2591 if (TREE_CODE (var_decl) == VAR_DECL)
2592 process_attributes (&var_decl, &attr_list, true, gnat_node);
2593
2594 /* Add this decl to the current binding level. */
2595 gnat_pushdecl (var_decl, gnat_node);
2596
2597 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2598 {
2599 /* Let the target mangle the name if this isn't a verbatim asm. */
2600 if (*IDENTIFIER_POINTER (asm_name) != '*')
2601 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2602
2603 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2604 }
2605
2606 return var_decl;
2607 }
2608 \f
2609 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2610
2611 static bool
2612 aggregate_type_contains_array_p (tree type)
2613 {
2614 switch (TREE_CODE (type))
2615 {
2616 case RECORD_TYPE:
2617 case UNION_TYPE:
2618 case QUAL_UNION_TYPE:
2619 {
2620 tree field;
2621 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2622 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2623 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2624 return true;
2625 return false;
2626 }
2627
2628 case ARRAY_TYPE:
2629 return true;
2630
2631 default:
2632 gcc_unreachable ();
2633 }
2634 }
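/* E.g. a record with an array field, or with a field whose record type
   itself contains an array, yields true; a record made only of scalar
   fields yields false. Note that the type must be an aggregate type to
   begin with, hence the gcc_unreachable in the default case.  */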
2635
2636 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2637 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2638 is the specified size of the field. If POS is nonzero, it is the bit
2639 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2640 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2641 means we are allowed to take the address of the field; if it is negative,
2642 we should not make a bitfield, which is used by make_aligning_type. */
2643
2644 tree
2645 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2646 int packed, int addressable)
2647 {
2648 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2649
2650 DECL_CONTEXT (field_decl) = record_type;
2651 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2652
2653 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2654 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2655 Likewise for an aggregate without specified position that contains an
2656 array, because in this case slices of variable length of this array
2657 must be handled by GCC and variable-sized objects need to be aligned
2658 to at least a byte boundary. */
2659 if (packed && (TYPE_MODE (type) == BLKmode
2660 || (!pos
2661 && AGGREGATE_TYPE_P (type)
2662 && aggregate_type_contains_array_p (type))))
2663 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2664
2665 /* If a size is specified, use it. Otherwise, if the record type is packed
2666 compute a size to use, which may differ from the object's natural size.
2667 We always set a size in this case to trigger the checks for bitfield
2668 creation below, which is typically required when no position has been
2669 specified. */
2670 if (size)
2671 size = convert (bitsizetype, size);
2672 else if (packed == 1)
2673 {
2674 size = rm_size (type);
2675 if (TYPE_MODE (type) == BLKmode)
2676 size = round_up (size, BITS_PER_UNIT);
2677 }
2678
2679 /* If ADDRESSABLE permits it, make a bitfield when a size is specified,
2680 for two reasons: first, if the size differs from the natural size;
2681 second, if the alignment is insufficient. There are a number of
2682 ways the latter can be true.
2683
2684 We never make a bitfield if the type of the field has a nonconstant size,
2685 because no such entity requiring bitfield operations should reach here.
2686
2687 We do *preventively* make a bitfield when there might be the need for it
2688 but we don't have all the necessary information to decide, as is the case
2689 of a field with no specified position in a packed record.
2690
2691 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2692 in layout_decl or finish_record_type to clear the bit_field indication if
2693 it is in fact not needed. */
2694 if (addressable >= 0
2695 && size
2696 && TREE_CODE (size) == INTEGER_CST
2697 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2698 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2699 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2700 || packed
2701 || (TYPE_ALIGN (record_type) != 0
2702 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2703 {
2704 DECL_BIT_FIELD (field_decl) = 1;
2705 DECL_SIZE (field_decl) = size;
2706 if (!packed && !pos)
2707 {
2708 if (TYPE_ALIGN (record_type) != 0
2709 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2710 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2711 else
2712 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2713 }
2714 }
2715
2716 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2717
2718 /* Bump the alignment if need be, either for bitfield/packing purposes or
2719 to satisfy the type requirements if no such consideration applies. When
2720 we get the alignment from the type, indicate if this is from an explicit
2721 user request, which prevents stor-layout from lowering it later on. */
2722 {
2723 unsigned int bit_align
2724 = (DECL_BIT_FIELD (field_decl) ? 1
2725 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2726
2727 if (bit_align > DECL_ALIGN (field_decl))
2728 SET_DECL_ALIGN (field_decl, bit_align);
2729 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2730 {
2731 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2732 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2733 }
2734 }
2735
2736 if (pos)
2737 {
2738 /* We need to pass in the alignment the DECL is known to have.
2739 This is the lowest-order bit set in POS, but no more than
2740 the alignment of the record, if one is specified. Note
2741 that an alignment of 0 is taken as infinite. */
2742 unsigned int known_align;
2743
2744 if (tree_fits_uhwi_p (pos))
2745 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2746 else
2747 known_align = BITS_PER_UNIT;
2748
2749 if (TYPE_ALIGN (record_type)
2750 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2751 known_align = TYPE_ALIGN (record_type);
2752
2753 layout_decl (field_decl, known_align);
2754 SET_DECL_OFFSET_ALIGN (field_decl,
2755 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2756 : BITS_PER_UNIT);
2757 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2758 &DECL_FIELD_BIT_OFFSET (field_decl),
2759 DECL_OFFSET_ALIGN (field_decl), pos);
2760 }
2761
2762 /* In addition to what our caller says, claim the field is addressable if we
2763 know that its type is not suitable.
2764
2765 The field may also be "technically" nonaddressable, meaning that even if
2766 we attempt to take the field's address we will actually get the address
2767 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2768 value we have at this point is not accurate enough, so we don't account
2769 for this here and let finish_record_type decide. */
2770 if (!addressable && !type_for_nonaliased_component_p (type))
2771 addressable = 1;
2772
2773 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2774
2775 return field_decl;
2776 }
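/* Example of the bitfield logic above (illustration only): a field given
   a 16-bit size while its type has a 32-bit natural size becomes a
   bitfield, and so does a 32-bit field placed at bit 8 by a record
   representation clause, since 8 is not a multiple of its 32-bit
   alignment.  */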
2777 \f
2778 /* Return a PARM_DECL node with NAME and TYPE. */
2779
2780 tree
2781 create_param_decl (tree name, tree type)
2782 {
2783 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2784
2785 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2786 can lead to various ABI violations. */
2787 if (targetm.calls.promote_prototypes (NULL_TREE)
2788 && INTEGRAL_TYPE_P (type)
2789 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2790 {
2791 /* We have to be careful about biased types here. Make a subtype
2792 of integer_type_node with the proper biasing. */
2793 if (TREE_CODE (type) == INTEGER_TYPE
2794 && TYPE_BIASED_REPRESENTATION_P (type))
2795 {
2796 tree subtype
2797 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2798 TREE_TYPE (subtype) = integer_type_node;
2799 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2800 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2801 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2802 type = subtype;
2803 }
2804 else
2805 type = integer_type_node;
2806 }
2807
2808 DECL_ARG_TYPE (param_decl) = type;
2809 return param_decl;
2810 }
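/* Example for the biased case above (illustration only): an Ada type with
   range 100 .. 163 using a biased representation stores a value as the
   actual value minus 100; the widened parameter subtype built above keeps
   TYPE_BIASED_REPRESENTATION_P set so that this decoding is preserved
   across the promotion.  */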
2811 \f
2812 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2813 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2814 changed. GNAT_NODE is used for the position of error messages. */
2815
2816 void
2817 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2818 Node_Id gnat_node)
2819 {
2820 struct attrib *attr;
2821
2822 for (attr = *attr_list; attr; attr = attr->next)
2823 switch (attr->type)
2824 {
2825 case ATTR_MACHINE_ATTRIBUTE:
2826 Sloc_to_locus (Sloc (gnat_node), &input_location);
2827 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2828 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2829 break;
2830
2831 case ATTR_LINK_ALIAS:
2832 if (!DECL_EXTERNAL (*node))
2833 {
2834 TREE_STATIC (*node) = 1;
2835 assemble_alias (*node, attr->name);
2836 }
2837 break;
2838
2839 case ATTR_WEAK_EXTERNAL:
2840 if (SUPPORTS_WEAK)
2841 declare_weak (*node);
2842 else
2843 post_error ("?weak declarations not supported on this target",
2844 attr->error_point);
2845 break;
2846
2847 case ATTR_LINK_SECTION:
2848 if (targetm_common.have_named_sections)
2849 {
2850 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2851 DECL_COMMON (*node) = 0;
2852 }
2853 else
2854 post_error ("?section attributes are not supported for this target",
2855 attr->error_point);
2856 break;
2857
2858 case ATTR_LINK_CONSTRUCTOR:
2859 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2860 TREE_USED (*node) = 1;
2861 break;
2862
2863 case ATTR_LINK_DESTRUCTOR:
2864 DECL_STATIC_DESTRUCTOR (*node) = 1;
2865 TREE_USED (*node) = 1;
2866 break;
2867
2868 case ATTR_THREAD_LOCAL_STORAGE:
2869 set_decl_tls_model (*node, decl_default_tls_model (*node));
2870 DECL_COMMON (*node) = 0;
2871 break;
2872 }
2873
2874 *attr_list = NULL;
2875 }
2876
2877 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2878 a power of 2. */
2879
2880 bool
2881 value_factor_p (tree value, HOST_WIDE_INT factor)
2882 {
2883 if (tree_fits_uhwi_p (value))
2884 return tree_to_uhwi (value) % factor == 0;
2885
2886 if (TREE_CODE (value) == MULT_EXPR)
2887 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2888 || value_factor_p (TREE_OPERAND (value, 1), factor));
2889
2890 return false;
2891 }
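/* E.g. the constant 24 is a multiple of FACTOR 8, and so is N * 16 since
   one operand of the product is; for a plain variable N nothing can be
   proved, so the function conservatively returns false.  */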
2892
2893 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2894 from the parameter association for the instantiation of a generic. We do
2895 not want to emit source location for them: the code generated for their
2896 initialization is likely to disturb debugging. */
2897
2898 bool
2899 renaming_from_instantiation_p (Node_Id gnat_node)
2900 {
2901 if (Nkind (gnat_node) != N_Defining_Identifier
2902 || !Is_Object (gnat_node)
2903 || Comes_From_Source (gnat_node)
2904 || !Present (Renamed_Object (gnat_node)))
2905 return false;
2906
2907 /* Get the object declaration of the renamed object, if any and if the
2908 renamed object is a mere identifier. */
2909 gnat_node = Renamed_Object (gnat_node);
2910 if (Nkind (gnat_node) != N_Identifier)
2911 return false;
2912
2913 gnat_node = Entity (gnat_node);
2914 if (!Present (Parent (gnat_node)))
2915 return false;
2916
2917 gnat_node = Parent (gnat_node);
2918 return
2919 (Present (gnat_node)
2920 && Nkind (gnat_node) == N_Object_Declaration
2921 && Present (Corresponding_Generic_Association (gnat_node)));
2922 }
2923
2924 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2925 feed it with the elaboration of GNAT_SCOPE. */
2926
2927 static struct deferred_decl_context_node *
2928 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2929 {
2930 struct deferred_decl_context_node *new_node;
2931
2932 new_node
2933 = (struct deferred_decl_context_node *) xmalloc (sizeof (*new_node));
2934 new_node->decl = decl;
2935 new_node->gnat_scope = gnat_scope;
2936 new_node->force_global = force_global;
2937 new_node->types.create (1);
2938 new_node->next = deferred_decl_context_queue;
2939 deferred_decl_context_queue = new_node;
2940 return new_node;
2941 }
2942
2943 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2944 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2945 computed. */
2946
2947 static void
2948 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2949 {
2950 n->types.safe_push (type);
2951 }
2952
2953 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2954 NULL_TREE if it is not available. */
2955
2956 static tree
2957 compute_deferred_decl_context (Entity_Id gnat_scope)
2958 {
2959 tree context;
2960
2961 if (present_gnu_tree (gnat_scope))
2962 context = get_gnu_tree (gnat_scope);
2963 else
2964 return NULL_TREE;
2965
2966 if (TREE_CODE (context) == TYPE_DECL)
2967 {
2968 const tree context_type = TREE_TYPE (context);
2969
2970 /* Skip dummy types: only the final ones can appear in the context
2971 chain. */
2972 if (TYPE_DUMMY_P (context_type))
2973 return NULL_TREE;
2974
2975 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2976 chain. */
2977 else
2978 context = context_type;
2979 }
2980
2981 return context;
2982 }
2983
2984 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
2985 that cannot be processed yet, remove the other ones. If FORCE is true,
2986 force the processing for all nodes, using the global context when nodes don't
2987 have a GNU translation. */
2988
2989 void
2990 process_deferred_decl_context (bool force)
2991 {
2992 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2993 struct deferred_decl_context_node *node;
2994
2995 while (*it)
2996 {
2997 bool processed = false;
2998 tree context = NULL_TREE;
2999 Entity_Id gnat_scope;
3000
3001 node = *it;
3002
3003 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3004 get the first scope. */
3005 gnat_scope = node->gnat_scope;
3006 while (Present (gnat_scope))
3007 {
3008 context = compute_deferred_decl_context (gnat_scope);
3009 if (!force || context)
3010 break;
3011 gnat_scope = get_debug_scope (gnat_scope, NULL);
3012 }
3013
3014 /* Imported declarations must not be in a local context (i.e. not inside
3015 a function). */
3016 if (context && node->force_global > 0)
3017 {
3018 tree ctx = context;
3019
3020 while (ctx)
3021 {
3022 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3023 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3024 }
3025 }
3026
3027 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3028 was no elaborated scope, use the global context. */
3029 if (force && !context)
3030 context = get_global_context ();
3031
3032 if (context)
3033 {
3034 tree t;
3035 int i;
3036
3037 DECL_CONTEXT (node->decl) = context;
3038
3039 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3040 ..._TYPE nodes. */
3041 FOR_EACH_VEC_ELT (node->types, i, t)
3042 {
3043 gnat_set_type_context (t, context);
3044 }
3045 processed = true;
3046 }
3047
3048 /* If this node has been successfully processed, remove it from the
3049 queue. Then move to the next node. */
3050 if (processed)
3051 {
3052 *it = node->next;
3053 node->types.release ();
3054 free (node);
3055 }
3056 else
3057 it = &node->next;
3058 }
3059 }
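
/* For illustration (a hypothetical scenario, not actual compiler code):
   a variable declared before its enclosing package has been elaborated
   gets queued by add_deferred_decl_context above; once the package's
   GENERIC node exists, a later call to process_deferred_decl_context
   sets the variable's DECL_CONTEXT, as well as the TYPE_CONTEXT of the
   types registered for it, in a single pass over the queue. */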
3060
3061 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3062
3063 static unsigned int
3064 scale_by_factor_of (tree expr, unsigned int value)
3065 {
3066 unsigned HOST_WIDE_INT addend = 0;
3067 unsigned HOST_WIDE_INT factor = 1;
3068
3069 /* Peel conversions around EXPR and try to extract bodies from function
3070 calls: it is possible to get the scale factor from size functions. */
3071 expr = remove_conversions (expr, true);
3072 if (TREE_CODE (expr) == CALL_EXPR)
3073 expr = maybe_inline_call_in_expr (expr);
3074
3075 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3076 multiple of the scale factor we are looking for. */
3077 if (TREE_CODE (expr) == PLUS_EXPR
3078 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3079 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3080 {
3081 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3082 expr = TREE_OPERAND (expr, 0);
3083 }
3084
3085 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3086 corresponding to the number of trailing zeros of the mask. */
3087 if (TREE_CODE (expr) == BIT_AND_EXPR
3088 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3089 {
3090 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3091 unsigned int i = 0;
3092
3093 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3094 {
3095 mask >>= 1;
3096 factor *= 2;
3097 i++;
3098 }
3099 }
3100
3101 /* If the addend is not a multiple of the factor we found, give up. In
3102 theory we could find a smaller common factor but it's useless for our
3103 needs. This situation arises when dealing with a field F1 with no
3104 alignment requirement but that is following a field F2 with such
3105 requirements. As long as we have F2's offset, we don't need alignment
3106 information to compute F1's. */
3107 if (addend % factor != 0)
3108 factor = 1;
3109
3110 return factor * value;
3111 }
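
/* A worked example with hypothetical values: given EXPR of the form
   (X & -8) + 16 and VALUE == 8, the mask -8 has 3 trailing zero bits,
   so FACTOR is 8; the addend 16 is a multiple of 8, hence the result
   is 8 * 8 == 64. Had the addend been 12 instead, the factor would
   have been reset to 1 and the result would have been just VALUE. */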
3112
3113 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3114 unless we can prove these 2 fields are laid out in such a way that no gap
3115 exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
3116 OFFSET is the distance in bits between the end of PREV_FIELD and the
3117 starting position of CURR_FIELD; it is ignored if null. */
3118
3119 static bool
3120 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3121 {
3122 /* If this is the first field of the record, there cannot be any gap. */
3123 if (!prev_field)
3124 return false;
3125
3126 /* If the previous field is a qualified union type, then return false:
3127 the only time when such a field is not the last field of the record
3128 is when there are other components at fixed positions after it
3129 (meaning there was a rep clause for every field), in which case we
3130 don't want the alignment constraint to override them. */
3131 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3132 return false;
3133
3134 /* If the distance between the end of prev_field and the beginning of
3135 curr_field is constant, then there is a gap if the value of this
3136 constant is nonzero. */
3137 if (offset && tree_fits_uhwi_p (offset))
3138 return !integer_zerop (offset);
3139
3140 /* If the size and position of the previous field are constant,
3141 then check the sum of this size and position. There will be a gap
3142 iff it is not a multiple of the current field alignment. */
3143 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3144 && tree_fits_uhwi_p (bit_position (prev_field)))
3145 return ((tree_to_uhwi (bit_position (prev_field))
3146 + tree_to_uhwi (DECL_SIZE (prev_field)))
3147 % DECL_ALIGN (curr_field) != 0);
3148
3149 /* If both the position and size of the previous field are multiples
3150 of the current field alignment, there cannot be any gap. */
3151 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3152 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3153 return false;
3154
3155 /* Fallback: return that there may be a potential gap. */
3156 return true;
3157 }
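
/* A worked example with a hypothetical layout: if PREV_FIELD starts at
   bit position 0 with DECL_SIZE 24 and CURR_FIELD has DECL_ALIGN 32,
   then (0 + 24) % 32 is nonzero and a potential gap is reported; with
   DECL_SIZE 32 instead, the sum is a multiple of the alignment and the
   function returns false. */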
3158
3159 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3160 the decl. */
3161
3162 tree
3163 create_label_decl (tree name, Node_Id gnat_node)
3164 {
3165 tree label_decl
3166 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3167
3168 SET_DECL_MODE (label_decl, VOIDmode);
3169
3170 /* Add this decl to the current binding level. */
3171 gnat_pushdecl (label_decl, gnat_node);
3172
3173 return label_decl;
3174 }
3175 \f
3176 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3177 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3178 the list of its parameters (a list of PARM_DECL nodes chained through the
3179 DECL_CHAIN field).
3180
3181 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3182
3183 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3184 definition to be made visible outside of the current compilation unit.
3185
3186 EXTERN_FLAG is true when processing an external subprogram declaration.
3187
3188 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3189
3190 DEBUG_INFO_P is true if we need to write debug information for it.
3191
3192 DEFINITION is true if the subprogram is to be considered as a definition.
3193
3194 ATTR_LIST is the list of attributes to be attached to the subprogram.
3195
3196 GNAT_NODE is used for the position of the decl. */
3197
3198 tree
3199 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3200 enum inline_status_t inline_status, bool public_flag,
3201 bool extern_flag, bool artificial_p, bool debug_info_p,
3202 bool definition, struct attrib *attr_list,
3203 Node_Id gnat_node)
3204 {
3205 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3206 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3207
3208 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3209 DECL_EXTERNAL (subprog_decl) = extern_flag;
3210 TREE_PUBLIC (subprog_decl) = public_flag;
3211
3212 if (!debug_info_p)
3213 DECL_IGNORED_P (subprog_decl) = 1;
3214 if (definition)
3215 DECL_FUNCTION_IS_DEF (subprog_decl) = 1;
3216
3217 switch (inline_status)
3218 {
3219 case is_suppressed:
3220 DECL_UNINLINABLE (subprog_decl) = 1;
3221 break;
3222
3223 case is_disabled:
3224 break;
3225
3226 case is_required:
3227 if (Back_End_Inlining)
3228 {
3229 decl_attributes (&subprog_decl,
3230 tree_cons (get_identifier ("always_inline"),
3231 NULL_TREE, NULL_TREE),
3232 ATTR_FLAG_TYPE_IN_PLACE);
3233
3234 /* Inline_Always guarantees that every direct call is inlined and
3235 that there is no indirect reference to the subprogram, so the
3236 instance in the original package (as well as its clones in the
3237 client packages created for inter-unit inlining) can be made
3238 private, which causes the out-of-line body to be eliminated. */
3239 TREE_PUBLIC (subprog_decl) = 0;
3240 }
3241
3242 /* ... fall through ... */
3243
3244 case is_enabled:
3245 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3246 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3247 break;
3248
3249 default:
3250 gcc_unreachable ();
3251 }
3252
3253 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3254
3255 /* Once everything is processed, finish the subprogram declaration. */
3256 finish_subprog_decl (subprog_decl, asm_name, type);
3257
3258 /* Add this decl to the current binding level. */
3259 gnat_pushdecl (subprog_decl, gnat_node);
3260
3261 /* Output the assembler code and/or RTL for the declaration. */
3262 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3263
3264 return subprog_decl;
3265 }
3266
3267 /* Given a subprogram declaration DECL, its assembler name ASM_NAME and its
3268 type TYPE, finish constructing the subprogram declaration. */
3269
3270 void
3271 finish_subprog_decl (tree decl, tree asm_name, tree type)
3272 {
3273 tree result_decl
3274 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3275 TREE_TYPE (type));
3276
3277 DECL_ARTIFICIAL (result_decl) = 1;
3278 DECL_IGNORED_P (result_decl) = 1;
3279 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3280 DECL_RESULT (decl) = result_decl;
3281
3282 TREE_READONLY (decl) = TYPE_READONLY (type);
3283 TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3284
3285 if (asm_name)
3286 {
3287 /* Let the target mangle the name if this isn't a verbatim asm. */
3288 if (*IDENTIFIER_POINTER (asm_name) != '*')
3289 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3290
3291 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3292
3293 /* The expand_main_function circuitry expects "main_identifier_node" to
3294 designate the DECL_NAME of the 'main' entry point, which in turn is
3295 expected to be declared literally as the "main" function by default.
3296 Ada program entry points are typically declared with a different name
3297 within the binder-generated file, and exported as 'main' to satisfy
3298 system expectations. Force main_identifier_node in this case. */
3299 if (asm_name == main_identifier_node)
3300 DECL_NAME (decl) = main_identifier_node;
3301 }
3302 }
3303 \f
3304 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3305 body. This routine needs to be invoked before processing the declarations
3306 appearing in the subprogram. */
3307
3308 void
3309 begin_subprog_body (tree subprog_decl)
3310 {
3311 tree param_decl;
3312
3313 announce_function (subprog_decl);
3314
3315 /* This function is being defined. */
3316 TREE_STATIC (subprog_decl) = 1;
3317
3318 /* The failure of this assertion will likely come from a wrong context for
3319 the subprogram body, e.g. another procedure for a procedure declared at
3320 library level. */
3321 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3322
3323 current_function_decl = subprog_decl;
3324
3325 /* Enter a new binding level and show that all the parameters belong to
3326 this function. */
3327 gnat_pushlevel ();
3328
3329 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3330 param_decl = DECL_CHAIN (param_decl))
3331 DECL_CONTEXT (param_decl) = subprog_decl;
3332
3333 make_decl_rtl (subprog_decl);
3334 }
3335
3336 /* Finish translating the current subprogram and set its BODY. */
3337
3338 void
3339 end_subprog_body (tree body)
3340 {
3341 tree fndecl = current_function_decl;
3342
3343 /* Attach the BLOCK for this level to the function and pop the level. */
3344 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3345 DECL_INITIAL (fndecl) = current_binding_level->block;
3346 gnat_poplevel ();
3347
3348 /* Mark the RESULT_DECL as being in this subprogram. */
3349 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3350
3351 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3352 if (TREE_CODE (body) == BIND_EXPR)
3353 {
3354 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3355 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3356 }
3357
3358 DECL_SAVED_TREE (fndecl) = body;
3359
3360 current_function_decl = decl_function_context (fndecl);
3361 }
3362
3363 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3364
3365 void
3366 rest_of_subprog_body_compilation (tree subprog_decl)
3367 {
3368 /* We cannot track the location of errors past this point. */
3369 error_gnat_node = Empty;
3370
3371 /* If we're only annotating types, don't actually compile this function. */
3372 if (type_annotate_only)
3373 return;
3374
3375 /* Dump functions before gimplification. */
3376 dump_function (TDI_original, subprog_decl);
3377
3378 if (!decl_function_context (subprog_decl))
3379 cgraph_node::finalize_function (subprog_decl, false);
3380 else
3381 /* Register this function with cgraph just far enough to get it
3382 added to our parent's nested function list. */
3383 (void) cgraph_node::get_create (subprog_decl);
3384 }
3385
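/* Record DECL, a builtin function, in the current binding level and
   return it. */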
3386 tree
3387 gnat_builtin_function (tree decl)
3388 {
3389 gnat_pushdecl (decl, Empty);
3390 return decl;
3391 }
3392
3393 /* Return an integer type with the number of bits of precision given by
3394 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3395 it is a signed type. */
3396
3397 tree
3398 gnat_type_for_size (unsigned precision, int unsignedp)
3399 {
3400 tree t;
3401 char type_name[20];
3402
3403 if (precision <= 2 * MAX_BITS_PER_WORD
3404 && signed_and_unsigned_types[precision][unsignedp])
3405 return signed_and_unsigned_types[precision][unsignedp];
3406
3407 if (unsignedp)
3408 t = make_unsigned_type (precision);
3409 else
3410 t = make_signed_type (precision);
3411 TYPE_ARTIFICIAL (t) = 1;
3412
3413 if (precision <= 2 * MAX_BITS_PER_WORD)
3414 signed_and_unsigned_types[precision][unsignedp] = t;
3415
3416 if (!TYPE_NAME (t))
3417 {
3418 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3419 TYPE_NAME (t) = get_identifier (type_name);
3420 }
3421
3422 return t;
3423 }
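
/* A minimal usage sketch (hypothetical caller, for illustration only):

     tree u32 = gnat_type_for_size (32, 1);
     tree s32 = gnat_type_for_size (32, 0);

   The first call builds and caches an unsigned 32-bit type, named
   "UNSIGNED_32" per the scheme above; the second likewise yields
   "SIGNED_32". Subsequent calls with the same arguments return the
   cached node, so pointer equality holds between repeated queries. */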
3424
3425 /* Likewise for floating-point types. */
3426
3427 static tree
3428 float_type_for_precision (int precision, machine_mode mode)
3429 {
3430 tree t;
3431 char type_name[20];
3432
3433 if (float_types[(int) mode])
3434 return float_types[(int) mode];
3435
3436 float_types[(int) mode] = t = make_node (REAL_TYPE);
3437 TYPE_PRECISION (t) = precision;
3438 layout_type (t);
3439
3440 gcc_assert (TYPE_MODE (t) == mode);
3441 if (!TYPE_NAME (t))
3442 {
3443 sprintf (type_name, "FLOAT_%d", precision);
3444 TYPE_NAME (t) = get_identifier (type_name);
3445 }
3446
3447 return t;
3448 }
3449
3450 /* Return a data type that has machine mode MODE. If UNSIGNEDP is nonzero,
3451 an unsigned type is returned; otherwise a signed type is returned. */
3452
3453 tree
3454 gnat_type_for_mode (machine_mode mode, int unsignedp)
3455 {
3456 if (mode == BLKmode)
3457 return NULL_TREE;
3458
3459 if (mode == VOIDmode)
3460 return void_type_node;
3461
3462 if (COMPLEX_MODE_P (mode))
3463 return NULL_TREE;
3464
3465 scalar_float_mode float_mode;
3466 if (is_a <scalar_float_mode> (mode, &float_mode))
3467 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3468 float_mode);
3469
3470 scalar_int_mode int_mode;
3471 if (is_a <scalar_int_mode> (mode, &int_mode))
3472 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3473
3474 if (VECTOR_MODE_P (mode))
3475 {
3476 machine_mode inner_mode = GET_MODE_INNER (mode);
3477 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3478 if (inner_type)
3479 return build_vector_type_for_mode (inner_type, mode);
3480 }
3481
3482 return NULL_TREE;
3483 }
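
/* For illustration: gnat_type_for_mode (SImode, 0) goes through the
   scalar_int_mode branch and yields the 32-bit signed type, while a
   vector mode such as V4SImode recurses on its SImode element and
   wraps the result with build_vector_type_for_mode; BLKmode and the
   complex modes have no counterpart here and yield NULL_TREE. */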
3484
3485 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3486 signedness being specified by UNSIGNEDP. */
3487
3488 tree
3489 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3490 {
3491 if (type_node == char_type_node)
3492 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3493
3494 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3495
3496 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3497 {
3498 type = copy_type (type);
3499 TREE_TYPE (type) = type_node;
3500 }
3501 else if (TREE_TYPE (type_node)
3502 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3503 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3504 {
3505 type = copy_type (type);
3506 TREE_TYPE (type) = TREE_TYPE (type_node);
3507 }
3508
3509 return type;
3510 }
3511
3512 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3513 transparently converted to each other. */
3514
3515 int
3516 gnat_types_compatible_p (tree t1, tree t2)
3517 {
3518 enum tree_code code;
3519
3520 /* This is the default criterion. */
3521 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3522 return 1;
3523
3524 /* We only check structural equivalence here. */
3525 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3526 return 0;
3527
3528 /* Vector types are also compatible if they have the same number of subparts
3529 and the same form of (scalar) element type. */
3530 if (code == VECTOR_TYPE
3531 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3532 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3533 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3534 return 1;
3535
3536 /* Array types are also compatible if they are constrained and have the same
3537 domain(s), the same component type and the same scalar storage order. */
3538 if (code == ARRAY_TYPE
3539 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3540 || (TYPE_DOMAIN (t1)
3541 && TYPE_DOMAIN (t2)
3542 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3543 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3544 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3545 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3546 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3547 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3548 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3549 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3550 return 1;
3551
3552 return 0;
3553 }
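
/* For illustration: two distinct GENERIC nodes both representing the
   constrained array type String (1 .. 10) are not the same
   TYPE_MAIN_VARIANT, yet they have equal domains, the same component
   type and the same storage order, so the function above returns 1 and
   conversions between them can be elided. */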
3554
3555 /* Return true if EXPR is a useless type conversion. */
3556
3557 bool
3558 gnat_useless_type_conversion (tree expr)
3559 {
3560 if (CONVERT_EXPR_P (expr)
3561 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3562 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3563 return gnat_types_compatible_p (TREE_TYPE (expr),
3564 TREE_TYPE (TREE_OPERAND (expr, 0)));
3565
3566 return false;
3567 }
3568
3569 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3570
3571 bool
3572 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3573 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3574 {
3575 return TYPE_CI_CO_LIST (t) == cico_list
3576 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3577 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3578 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3579 }
3580 \f
3581 /* EXP is an expression for the size of an object. If this size contains
3582 discriminant references, replace them with the maximum (if MAX_P) or
3583 minimum (if !MAX_P) possible value of the discriminant. */
3584
3585 tree
3586 max_size (tree exp, bool max_p)
3587 {
3588 enum tree_code code = TREE_CODE (exp);
3589 tree type = TREE_TYPE (exp);
3590 tree op0, op1, op2;
3591
3592 switch (TREE_CODE_CLASS (code))
3593 {
3594 case tcc_declaration:
3595 case tcc_constant:
3596 return exp;
3597
3598 case tcc_vl_exp:
3599 if (code == CALL_EXPR)
3600 {
3601 tree t, *argarray;
3602 int n, i;
3603
3604 t = maybe_inline_call_in_expr (exp);
3605 if (t)
3606 return max_size (t, max_p);
3607
3608 n = call_expr_nargs (exp);
3609 gcc_assert (n > 0);
3610 argarray = XALLOCAVEC (tree, n);
3611 for (i = 0; i < n; i++)
3612 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3613 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3614 }
3615 break;
3616
3617 case tcc_reference:
3618 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3619 modify. Otherwise, we treat it like a variable. */
3620 if (CONTAINS_PLACEHOLDER_P (exp))
3621 {
3622 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3623 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3624 return
3625 convert (type,
3626 max_size (convert (get_base_type (val_type), val), true));
3627 }
3628
3629 return exp;
3630
3631 case tcc_comparison:
3632 return build_int_cst (type, max_p ? 1 : 0);
3633
3634 case tcc_unary:
3635 if (code == NON_LVALUE_EXPR)
3636 return max_size (TREE_OPERAND (exp, 0), max_p);
3637
3638 op0 = max_size (TREE_OPERAND (exp, 0),
3639 code == NEGATE_EXPR ? !max_p : max_p);
3640
3641 if (op0 == TREE_OPERAND (exp, 0))
3642 return exp;
3643
3644 return fold_build1 (code, type, op0);
3645
3646 case tcc_binary:
3647 {
3648 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3649 tree rhs = max_size (TREE_OPERAND (exp, 1),
3650 code == MINUS_EXPR ? !max_p : max_p);
3651
3652 /* Special-case wanting the maximum value of a MIN_EXPR.
3653 In that case, if one side overflows, return the other. */
3654 if (max_p && code == MIN_EXPR)
3655 {
3656 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3657 return lhs;
3658
3659 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3660 return rhs;
3661 }
3662
3663 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3664 overflowing and the RHS a variable. */
3665 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3666 && TREE_CODE (lhs) == INTEGER_CST
3667 && TREE_OVERFLOW (lhs)
3668 && TREE_CODE (rhs) != INTEGER_CST)
3669 return lhs;
3670
3671 /* If we are going to subtract a "negative" value in an unsigned type,
3672 do the operation as an addition of the negated value, in order to
3673 avoid creating a spurious overflow below. */
3674 if (code == MINUS_EXPR
3675 && TYPE_UNSIGNED (type)
3676 && TREE_CODE (rhs) == INTEGER_CST
3677 && !TREE_OVERFLOW (rhs)
3678 && tree_int_cst_sign_bit (rhs) != 0)
3679 {
3680 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3681 code = PLUS_EXPR;
3682 }
3683
3684 if (lhs == TREE_OPERAND (exp, 0) && rhs == TREE_OPERAND (exp, 1))
3685 return exp;
3686
3687 /* We need to detect overflows so we call size_binop here. */
3688 return size_binop (code, lhs, rhs);
3689 }
3690
3691 case tcc_expression:
3692 switch (TREE_CODE_LENGTH (code))
3693 {
3694 case 1:
3695 if (code == SAVE_EXPR)
3696 return exp;
3697
3698 op0 = max_size (TREE_OPERAND (exp, 0),
3699 code == TRUTH_NOT_EXPR ? !max_p : max_p);
3700
3701 if (op0 == TREE_OPERAND (exp, 0))
3702 return exp;
3703
3704 return fold_build1 (code, type, op0);
3705
3706 case 2:
3707 if (code == COMPOUND_EXPR)
3708 return max_size (TREE_OPERAND (exp, 1), max_p);
3709
3710 op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3711 op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3712
3713 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3714 return exp;
3715
3716 return fold_build2 (code, type, op0, op1);
3717
3718 case 3:
3719 if (code == COND_EXPR)
3720 {
3721 op1 = TREE_OPERAND (exp, 1);
3722 op2 = TREE_OPERAND (exp, 2);
3723
3724 if (!op1 || !op2)
3725 return exp;
3726
3727 return
3728 fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3729 max_size (op1, max_p), max_size (op2, max_p));
3730 }
3731 break;
3732
3733 default:
3734 break;
3735 }
3736
3737 /* Other tree classes cannot happen. */
3738 default:
3739 break;
3740 }
3741
3742 gcc_unreachable ();
3743 }
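
/* A worked example with hypothetical sizes: for a record whose size is
   the expression 16 + 8 * D, where D is a reference to a discriminant
   with range 1 .. 10, max_size with MAX_P true substitutes 10 for the
   discriminant reference and folds the result to 96, while with MAX_P
   false it substitutes 1 and yields 24. */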
3744 \f
3745 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3746 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3747 Return a constructor for the template. */
3748
3749 tree
3750 build_template (tree template_type, tree array_type, tree expr)
3751 {
3752 vec<constructor_elt, va_gc> *template_elts = NULL;
3753 tree bound_list = NULL_TREE;
3754 tree field;
3755
3756 while (TREE_CODE (array_type) == RECORD_TYPE
3757 && (TYPE_PADDING_P (array_type)
3758 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3759 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3760
3761 if (TREE_CODE (array_type) == ARRAY_TYPE
3762 || (TREE_CODE (array_type) == INTEGER_TYPE
3763 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3764 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3765
3766 /* First make the list for a CONSTRUCTOR for the template. Go down the
3767 field list of the template instead of the type chain because this
3768 array might be an Ada array of arrays and we can't tell where the
3769 nested arrays stop being the underlying object. */
3770
3771 for (field = TYPE_FIELDS (template_type); field;
3772 (bound_list
3773 ? (bound_list = TREE_CHAIN (bound_list))
3774 : (array_type = TREE_TYPE (array_type))),
3775 field = DECL_CHAIN (DECL_CHAIN (field)))
3776 {
3777 tree bounds, min, max;
3778
3779 /* If we have a bound list, get the bounds from there. Likewise
3780 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3781 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3782 This will give us a maximum range. */
3783 if (bound_list)
3784 bounds = TREE_VALUE (bound_list);
3785 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3786 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3787 else if (expr && TREE_CODE (expr) == PARM_DECL
3788 && DECL_BY_COMPONENT_PTR_P (expr))
3789 bounds = TREE_TYPE (field);
3790 else
3791 gcc_unreachable ();
3792
3793 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3794 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3795
3796 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
3797 substitute it from EXPR. */
3798 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3799 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3800
3801 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3802 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3803 }
3804
3805 return gnat_build_constructor (template_type, template_elts);
3806 }
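
/* For illustration: for a one-dimensional array with bounds L .. U, the
   template type has two fields per dimension, conventionally named LB0
   and UB0, and the constructor built above is { LB0 => L, UB0 => U },
   with any PLACEHOLDER_EXPR in L or U substituted from EXPR. */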
3807 \f
3808 /* Return true if TYPE is suitable for the element type of a vector. */
3809
3810 static bool
3811 type_for_vector_element_p (tree type)
3812 {
3813 machine_mode mode;
3814
3815 if (!INTEGRAL_TYPE_P (type)
3816 && !SCALAR_FLOAT_TYPE_P (type)
3817 && !FIXED_POINT_TYPE_P (type))
3818 return false;
3819
3820 mode = TYPE_MODE (type);
3821 if (GET_MODE_CLASS (mode) != MODE_INT
3822 && !SCALAR_FLOAT_MODE_P (mode)
3823 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3824 return false;
3825
3826 return true;
3827 }
3828
3829 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3830 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3831 attribute declaration and want to issue error messages on failure. */
3832
3833 static tree
3834 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3835 {
3836 unsigned HOST_WIDE_INT size_int, inner_size_int;
3837 int nunits;
3838
3839 /* Silently punt on variable sizes. We can't make vector types for them:
3840 we need to ignore them on front-end generated subtypes of unconstrained
3841 base types, and this attribute is for binding implementors, not end
3842 users, so we should never get here from legitimate explicit uses. */
3843 if (!tree_fits_uhwi_p (size))
3844 return NULL_TREE;
3845 size_int = tree_to_uhwi (size);
3846
3847 if (!type_for_vector_element_p (inner_type))
3848 {
3849 if (attribute)
3850 error ("invalid element type for attribute %qs",
3851 IDENTIFIER_POINTER (attribute));
3852 return NULL_TREE;
3853 }
3854 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3855
3856 if (size_int % inner_size_int)
3857 {
3858 if (attribute)
3859 error ("vector size not an integral multiple of component size");
3860 return NULL_TREE;
3861 }
3862
3863 if (size_int == 0)
3864 {
3865 if (attribute)
3866 error ("zero vector size");
3867 return NULL_TREE;
3868 }
3869
3870 nunits = size_int / inner_size_int;
3871 if (nunits & (nunits - 1))
3872 {
3873 if (attribute)
3874 error ("number of components of vector not a power of two");
3875 return NULL_TREE;
3876 }
3877
3878 return build_vector_type (inner_type, nunits);
3879 }
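
/* A worked example with hypothetical sizes: with INNER_TYPE a 4-byte
   float and SIZE equal to 16 bytes, NUNITS is 4, a power of two, so a
   4-element vector type is built. A SIZE of 10 would trip the integral
   multiple check and a SIZE of 12 the power-of-two check, each with an
   error message only if ATTRIBUTE is set. */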
3880
3881 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3882 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3883 processing the attribute and want to issue error messages on failure. */
3884
3885 static tree
3886 build_vector_type_for_array (tree array_type, tree attribute)
3887 {
3888 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3889 TYPE_SIZE_UNIT (array_type),
3890 attribute);
3891 if (!vector_type)
3892 return NULL_TREE;
3893
3894 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3895 return vector_type;
3896 }
3897 \f
3898 /* Build a type to be used to represent an aliased object whose nominal type
3899 is an unconstrained array. This consists of a RECORD_TYPE containing a
3900 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3901 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3902 an arbitrary unconstrained object. Use NAME as the name of the record.
3903 DEBUG_INFO_P is true if we need to write debug information for the type. */
3904
3905 tree
3906 build_unc_object_type (tree template_type, tree object_type, tree name,
3907 bool debug_info_p)
3908 {
3909 tree decl;
3910 tree type = make_node (RECORD_TYPE);
3911 tree template_field
3912 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3913 NULL_TREE, NULL_TREE, 0, 1);
3914 tree array_field
3915 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3916 NULL_TREE, NULL_TREE, 0, 1);
3917
3918 TYPE_NAME (type) = name;
3919 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3920 DECL_CHAIN (template_field) = array_field;
3921 finish_record_type (type, template_field, 0, true);
3922
3923 /* Declare it now since it will never be declared otherwise. This is
3924 necessary to ensure that its subtrees are properly marked. */
3925 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3926
3927 /* TEMPLATE_TYPE will not be used elsewhere than here, so to keep the
3928 debug info clean and in order to avoid scoping issues, make DECL its
3929 context. */
3930 gnat_set_type_context (template_type, decl);
3931
3932 return type;
3933 }
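
/* For illustration, the layout of the record built above is

     record
       BOUNDS : <template with the bounds>;
       ARRAY  : <the array data>;
     end record

   so the bounds sit at a fixed offset before the array data and a thin
   pointer to ARRAY can reach them by a negative displacement. */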
3934
3935 /* Same, taking a thin or fat pointer type instead of a template type. */
3936
3937 tree
3938 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3939 tree name, bool debug_info_p)
3940 {
3941 tree template_type;
3942
3943 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3944
3945 template_type
3946 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3947 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3948 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3949
3950 return
3951 build_unc_object_type (template_type, object_type, name, debug_info_p);
3952 }
3953 \f
3954 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3955 In the normal case this is just two adjustments, but we have more to
3956 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3957
3958 void
3959 update_pointer_to (tree old_type, tree new_type)
3960 {
3961 tree ptr = TYPE_POINTER_TO (old_type);
3962 tree ref = TYPE_REFERENCE_TO (old_type);
3963 tree t;
3964
3965 /* If this is the main variant, process all the other variants first. */
3966 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3967 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3968 update_pointer_to (t, new_type);
3969
3970 /* If no pointers and no references, we are done. */
3971 if (!ptr && !ref)
3972 return;
3973
3974 /* Merge the old type qualifiers in the new type.
3975
3976 Each old variant has qualifiers for specific reasons, and the new
3977 designated type as well. Each set of qualifiers represents useful
3978 information grabbed at some point, and merging the two simply unifies
3979 these inputs into the final type description.
3980
3981 Consider for instance a volatile type frozen after an access to constant
3982 type designating it; after the designated type's freeze, we get here with
3983 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3984 when the access type was processed. We will make a volatile and readonly
3985 designated type, because that's what it really is.
3986
3987 We might also get here for a non-dummy OLD_TYPE variant with different
3988 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3989 to private record type elaboration (see the comments around the call to
3990 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3991 the qualifiers in those cases too, to avoid accidentally discarding the
3992 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3993 new_type
3994 = build_qualified_type (new_type,
3995 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3996
3997 /* If old type and new type are identical, there is nothing to do. */
3998 if (old_type == new_type)
3999 return;
4000
4001 /* Otherwise, first handle the simple case. */
4002 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4003 {
4004 tree new_ptr, new_ref;
4005
4006 /* If pointer or reference already points to new type, nothing to do.
4007 This can happen as update_pointer_to can be invoked multiple times
4008 on the same couple of types because of the type variants. */
4009 if ((ptr && TREE_TYPE (ptr) == new_type)
4010 || (ref && TREE_TYPE (ref) == new_type))
4011 return;
4012
4013 /* Chain PTR and its variants at the end. */
4014 new_ptr = TYPE_POINTER_TO (new_type);
4015 if (new_ptr)
4016 {
4017 while (TYPE_NEXT_PTR_TO (new_ptr))
4018 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4019 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4020 }
4021 else
4022 TYPE_POINTER_TO (new_type) = ptr;
4023
4024 /* Now adjust them. */
4025 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4026 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4027 {
4028 TREE_TYPE (t) = new_type;
4029 if (TYPE_NULL_BOUNDS (t))
4030 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4031 }
4032
4033 /* Chain REF and its variants at the end. */
4034 new_ref = TYPE_REFERENCE_TO (new_type);
4035 if (new_ref)
4036 {
4037 while (TYPE_NEXT_REF_TO (new_ref))
4038 new_ref = TYPE_NEXT_REF_TO (new_ref);
4039 TYPE_NEXT_REF_TO (new_ref) = ref;
4040 }
4041 else
4042 TYPE_REFERENCE_TO (new_type) = ref;
4043
4044 /* Now adjust them. */
4045 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4046 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4047 TREE_TYPE (t) = new_type;
4048
4049 TYPE_POINTER_TO (old_type) = NULL_TREE;
4050 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4051 }
4052
4053 /* Now deal with the unconstrained array case. In this case the pointer
4054 is actually a record where both fields are pointers to dummy nodes.
4055 Turn them into pointers to the correct types using update_pointer_to.
4056 Likewise for the pointer to the object record (thin pointer). */
4057 else
4058 {
4059 tree new_ptr = TYPE_POINTER_TO (new_type);
4060
4061 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4062
4063 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4064 since update_pointer_to can be invoked multiple times on the same
4065 couple of types because of the type variants. */
4066 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4067 return;
4068
4069 update_pointer_to
4070 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4071 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4072
4073 update_pointer_to
4074 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4075 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4076
4077 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4078 TYPE_OBJECT_RECORD_TYPE (new_type));
4079
4080 TYPE_POINTER_TO (old_type) = NULL_TREE;
4081 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4082 }
4083 }
4084 \f
4085 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4086 unconstrained one. This involves making or finding a template. */
4087
4088 static tree
4089 convert_to_fat_pointer (tree type, tree expr)
4090 {
4091 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4092 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4093 tree etype = TREE_TYPE (expr);
4094 tree template_addr;
4095 vec<constructor_elt, va_gc> *v;
4096 vec_alloc (v, 2);
4097
4098 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4099 array (compare_fat_pointers ensures that this is the full discriminant)
4100 and a valid pointer to the bounds. This latter property is necessary
4101 since the compiler can hoist the load of the bounds done through it. */
4102 if (integer_zerop (expr))
4103 {
4104 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4105 tree null_bounds, t;
4106
4107 if (TYPE_NULL_BOUNDS (ptr_template_type))
4108 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4109 else
4110 {
4111 /* The template type can still be dummy at this point so we build an
4112 empty constructor. The middle-end will fill it in with zeros. */
4113 t = build_constructor (template_type, NULL);
4114 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4115 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4116 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4117 }
4118
4119 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4120 fold_convert (p_array_type, null_pointer_node));
4121 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4122 t = build_constructor (type, v);
4123 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4124 TREE_CONSTANT (t) = 0;
4125 TREE_STATIC (t) = 1;
4126
4127 return t;
4128 }
4129
4130 /* If EXPR is a thin pointer, make template and data from the record. */
4131 if (TYPE_IS_THIN_POINTER_P (etype))
4132 {
4133 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4134
4135 expr = gnat_protect_expr (expr);
4136
4137 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4138 the thin pointer value has been shifted so we shift it back to get
4139 the template address. */
4140 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4141 {
4142 template_addr
4143 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4144 fold_build1 (NEGATE_EXPR, sizetype,
4145 byte_position
4146 (DECL_CHAIN (field))));
4147 template_addr
4148 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4149 template_addr);
4150 }
4151
4152 /* Otherwise we explicitly take the address of the fields. */
4153 else
4154 {
4155 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4156 template_addr
4157 = build_unary_op (ADDR_EXPR, NULL_TREE,
4158 build_component_ref (expr, field, false));
4159 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4160 build_component_ref (expr, DECL_CHAIN (field),
4161 false));
4162 }
4163 }
4164
4165 /* Otherwise, build the constructor for the template. */
4166 else
4167 template_addr
4168 = build_unary_op (ADDR_EXPR, NULL_TREE,
4169 build_template (template_type, TREE_TYPE (etype),
4170 expr));
4171
4172 /* The final result is a constructor for the fat pointer.
4173
4174 If EXPR is an argument of a foreign convention subprogram, the type it
4175 points to is directly the component type. In this case, the expression
4176 type may not match the corresponding FIELD_DECL type at this point, so we
4177 call "convert" here to fix that up if necessary. This type consistency is
4178 required, for instance because it ensures that possible later folding of
4179 COMPONENT_REFs against this constructor always yields something of the
4180 same type as the initial reference.
4181
4182 Note that the call to "build_template" above is still fine because it
4183 will only refer to the provided TEMPLATE_TYPE in this case. */
4184 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4185 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4186 return gnat_build_constructor (type, v);
4187 }
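
/* A worked example with hypothetical values: converting a pointer P to
   a constrained String (1 .. 5) to a fat pointer yields the constructor
   { P_ARRAY => P, P_BOUNDS => &template }, where the template {1, 5} is
   built on the fly by build_template. Converting a null pointer instead
   yields a null P_ARRAY together with a valid pointer to statically
   allocated null bounds, as explained above. */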
4188 \f
4189 /* Create an expression whose value is that of EXPR,
4190 converted to type TYPE. The TREE_TYPE of the value
4191 is always TYPE. This function implements all reasonable
4192 conversions; callers should filter out those that are
4193 not permitted by the language being compiled. */
4194
4195 tree
4196 convert (tree type, tree expr)
4197 {
4198 tree etype = TREE_TYPE (expr);
4199 enum tree_code ecode = TREE_CODE (etype);
4200 enum tree_code code = TREE_CODE (type);
4201
4202 /* If the expression is already of the right type, we are done. */
4203 if (etype == type)
4204 return expr;
4205
4206 /* If both input and output have padding and are of variable size, do this
4207 as an unchecked conversion. Likewise if one is a mere variant of the
4208 other, so we avoid a pointless unpad/repad sequence. */
4209 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4210 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4211 && (!TREE_CONSTANT (TYPE_SIZE (type))
4212 || !TREE_CONSTANT (TYPE_SIZE (etype))
4213 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4214 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4215 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4216 ;
4217
4218 /* If the output type has padding, convert to the inner type and make a
4219 constructor to build the record, unless a variable size is involved. */
4220 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4221 {
4222 vec<constructor_elt, va_gc> *v;
4223
4224 /* If we previously converted from another type and our type is
4225 of variable size, remove the conversion to avoid the need for
4226 variable-sized temporaries. Likewise for a conversion between
4227 original and packable version. */
4228 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4229 && (!TREE_CONSTANT (TYPE_SIZE (type))
4230 || (ecode == RECORD_TYPE
4231 && TYPE_NAME (etype)
4232 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4233 expr = TREE_OPERAND (expr, 0);
4234
4235 /* If we are just removing the padding from expr, convert the original
4236 object if we have variable size in order to avoid the need for some
4237 variable-sized temporaries. Likewise if the padding is a variant
4238 of the other, so we avoid a pointless unpad/repad sequence. */
4239 if (TREE_CODE (expr) == COMPONENT_REF
4240 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4241 && (!TREE_CONSTANT (TYPE_SIZE (type))
4242 || TYPE_MAIN_VARIANT (type)
4243 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4244 || (ecode == RECORD_TYPE
4245 && TYPE_NAME (etype)
4246 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4247 return convert (type, TREE_OPERAND (expr, 0));
4248
4249 /* If the inner type is of self-referential size and the expression type
4250 is a record, do this as an unchecked conversion. But first pad the
4251 expression if possible to have the same size on both sides. */
4252 if (ecode == RECORD_TYPE
4253 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4254 {
4255 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4256 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4257 false, false, false, true),
4258 expr);
4259 return unchecked_convert (type, expr, false);
4260 }
4261
4262 /* If we are converting between array types with variable size, do the
4263 final conversion as an unchecked conversion, again to avoid the need
4264 for some variable-sized temporaries. If valid, this conversion is
4265 very likely purely technical and without real effects. */
4266 if (ecode == ARRAY_TYPE
4267 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4268 && !TREE_CONSTANT (TYPE_SIZE (etype))
4269 && !TREE_CONSTANT (TYPE_SIZE (type)))
4270 return unchecked_convert (type,
4271 convert (TREE_TYPE (TYPE_FIELDS (type)),
4272 expr),
4273 false);
4274
4275 vec_alloc (v, 1);
4276 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4277 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4278 return gnat_build_constructor (type, v);
4279 }
4280
4281 /* If the input type has padding, remove it and convert to the output type.
4282 The ordering of the conditions is arranged to ensure that the output type
4283 is not a padding type here, as it is not clear whether the conversion
4284 would always be correct if this were to happen. */
4285 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4286 {
4287 tree unpadded;
4288
4289 /* If we have just converted to this padded type, just get the
4290 inner expression. */
4291 if (TREE_CODE (expr) == CONSTRUCTOR)
4292 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4293
4294 /* Otherwise, build an explicit component reference. */
4295 else
4296 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4297
4298 return convert (type, unpadded);
4299 }
4300
4301 /* If the input is a biased type, convert first to the base type and add
4302 the bias. Note that the bias must go through a full conversion to the
4303 base type, lest it is itself a biased value; this happens for subtypes
4304 of biased types. */
4305 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4306 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4307 fold_convert (TREE_TYPE (etype), expr),
4308 convert (TREE_TYPE (etype),
4309 TYPE_MIN_VALUE (etype))));
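
  /* A worked example with a hypothetical subtype: for a biased subtype
     covering 100 .. 107 and stored on 3 bits, converting the stored
     value 5 first computes 5 + 100 == 105 in the base type, and only
     then converts 105 to the target type. */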
4310
4311 /* If the input is a justified modular type, we need to extract the actual
4312 object before converting it to any other type, with the exceptions of an
4313 unconstrained array and of a mere type variant. It is useful to avoid the
4314 extraction and conversion in the type variant case because it could end
4315 up replacing a VAR_DECL expr by a constructor and we might be about to
4316 take the address of the result. */
4317 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4318 && code != UNCONSTRAINED_ARRAY_TYPE
4319 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4320 return
4321 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4322
4323 /* If converting to a type that contains a template, convert to the data
4324 type and then build the template. */
4325 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4326 {
4327 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4328 vec<constructor_elt, va_gc> *v;
4329 vec_alloc (v, 2);
4330
4331 /* If the source already has a template, get a reference to the
4332 associated array only, as we are going to rebuild a template
4333 for the target type anyway. */
4334 expr = maybe_unconstrained_array (expr);
4335
4336 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4337 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4338 obj_type, NULL_TREE));
4339 if (expr)
4340 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4341 convert (obj_type, expr));
4342 return gnat_build_constructor (type, v);
4343 }
4344
4345 /* There are some cases of expressions that we process specially. */
4346 switch (TREE_CODE (expr))
4347 {
4348 case ERROR_MARK:
4349 return expr;
4350
4351 case NULL_EXPR:
4352 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4353 conversion in gnat_expand_expr. NULL_EXPR does not represent
4354 an actual value, so no conversion is needed. */
4355 expr = copy_node (expr);
4356 TREE_TYPE (expr) = type;
4357 return expr;
4358
4359 case STRING_CST:
4360 /* If we are converting a STRING_CST to another constrained array type,
4361 just make a new one in the proper type. */
4362 if (code == ecode && AGGREGATE_TYPE_P (etype)
4363 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4364 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4365 {
4366 expr = copy_node (expr);
4367 TREE_TYPE (expr) = type;
4368 return expr;
4369 }
4370 break;
4371
4372 case VECTOR_CST:
4373 /* If we are converting a VECTOR_CST to a mere type variant, just make
4374 a new one in the proper type. */
4375 if (code == ecode && gnat_types_compatible_p (type, etype))
4376 {
4377 expr = copy_node (expr);
4378 TREE_TYPE (expr) = type;
4379 return expr;
4380 }
4381 break;
4382
4383 case CONSTRUCTOR:
4384 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4385 another padding type around the same type, just make a new one in
4386 the proper type. */
4387 if (code == ecode
4388 && (gnat_types_compatible_p (type, etype)
4389 || (code == RECORD_TYPE
4390 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4391 && TREE_TYPE (TYPE_FIELDS (type))
4392 == TREE_TYPE (TYPE_FIELDS (etype)))))
4393 {
4394 expr = copy_node (expr);
4395 TREE_TYPE (expr) = type;
4396 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4397 return expr;
4398 }
4399
4400 /* Likewise for a conversion between original and packable version, or
4401 conversion between types of the same size and with the same list of
4402 fields, but we have to work harder to preserve type consistency. */
4403 if (code == ecode
4404 && code == RECORD_TYPE
4405 && (TYPE_NAME (type) == TYPE_NAME (etype)
4406 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4408 {
4409 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4410 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4411 vec<constructor_elt, va_gc> *v;
4412 vec_alloc (v, len);
4413 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4414 unsigned HOST_WIDE_INT idx;
4415 tree index, value;
4416
4417 /* Whether we need to clear TREE_CONSTANT et al. on the output
4418 constructor when we convert in place. */
4419 bool clear_constant = false;
4420
4421 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4422 {
4423 /* Skip the missing fields in the CONSTRUCTOR. */
4424 while (efield && field && !SAME_FIELD_P (efield, index))
4425 {
4426 efield = DECL_CHAIN (efield);
4427 field = DECL_CHAIN (field);
4428 }
4429 /* The field must be the same. */
4430 if (!(efield && field && SAME_FIELD_P (efield, field)))
4431 break;
4432 constructor_elt elt
4433 = {field, convert (TREE_TYPE (field), value)};
4434 v->quick_push (elt);
4435
4436 /* If packing has made this field a bitfield and the input
4437 value couldn't be emitted statically any more, we need to
4438 clear TREE_CONSTANT on our output. */
4439 if (!clear_constant
4440 && TREE_CONSTANT (expr)
4441 && !CONSTRUCTOR_BITFIELD_P (efield)
4442 && CONSTRUCTOR_BITFIELD_P (field)
4443 && !initializer_constant_valid_for_bitfield_p (value))
4444 clear_constant = true;
4445
4446 efield = DECL_CHAIN (efield);
4447 field = DECL_CHAIN (field);
4448 }
4449
4450 /* If we have been able to match and convert all the input fields
4451 to their output type, convert in place now. We'll fall back to a
4452 view conversion downstream otherwise. */
4453 if (idx == len)
4454 {
4455 expr = copy_node (expr);
4456 TREE_TYPE (expr) = type;
4457 CONSTRUCTOR_ELTS (expr) = v;
4458 if (clear_constant)
4459 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4460 return expr;
4461 }
4462 }
4463
4464 /* Likewise for a conversion between array type and vector type with a
4465 compatible representative array. */
4466 else if (code == VECTOR_TYPE
4467 && ecode == ARRAY_TYPE
4468 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4469 etype))
4470 {
4471 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4472 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4473 vec<constructor_elt, va_gc> *v;
4474 unsigned HOST_WIDE_INT ix;
4475 tree value;
4476
4477 /* Build a VECTOR_CST from a *constant* array constructor. */
4478 if (TREE_CONSTANT (expr))
4479 {
4480 bool constant_p = true;
4481
4482 /* Iterate through elements and check if all constructor
4483 elements are *_CSTs. */
4484 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4485 if (!CONSTANT_CLASS_P (value))
4486 {
4487 constant_p = false;
4488 break;
4489 }
4490
4491 if (constant_p)
4492 return build_vector_from_ctor (type,
4493 CONSTRUCTOR_ELTS (expr));
4494 }
4495
4496 /* Otherwise, build a regular vector constructor. */
4497 vec_alloc (v, len);
4498 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4499 {
4500 constructor_elt elt = {NULL_TREE, value};
4501 v->quick_push (elt);
4502 }
4503 expr = copy_node (expr);
4504 TREE_TYPE (expr) = type;
4505 CONSTRUCTOR_ELTS (expr) = v;
4506 return expr;
4507 }
4508 break;
4509
4510 case UNCONSTRAINED_ARRAY_REF:
4511 /* First retrieve the underlying array. */
4512 expr = maybe_unconstrained_array (expr);
4513 etype = TREE_TYPE (expr);
4514 ecode = TREE_CODE (etype);
4515 break;
4516
4517 case VIEW_CONVERT_EXPR:
4518 {
4519 /* GCC 4.x is very sensitive to type consistency overall, and view
4520 conversions thus are very frequent. Even though just "convert"ing
4521 the inner operand to the output type is fine in most cases, it
4522 might expose unexpected input/output type mismatches in special
4523 circumstances so we avoid such recursive calls when we can. */
4524 tree op0 = TREE_OPERAND (expr, 0);
4525
4526 /* If we are converting back to the original type, we can just
4527 lift the input conversion. This is a common occurrence with
4528 switches back-and-forth amongst type variants. */
4529 if (type == TREE_TYPE (op0))
4530 return op0;
4531
4532 /* Otherwise, if we're converting between two aggregate or vector
4533 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4534 target type in place or to just convert the inner expression. */
4535 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4536 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4537 {
4538 /* If we are converting between mere variants, we can just
4539 substitute the VIEW_CONVERT_EXPR in place. */
4540 if (gnat_types_compatible_p (type, etype))
4541 return build1 (VIEW_CONVERT_EXPR, type, op0);
4542
4543 /* Otherwise, we may just bypass the input view conversion unless
4544 one of the types is a fat pointer, which is handled by
4545 specialized code below which relies on exact type matching. */
4546 else if (!TYPE_IS_FAT_POINTER_P (type)
4547 && !TYPE_IS_FAT_POINTER_P (etype))
4548 return convert (type, op0);
4549 }
4550
4551 break;
4552 }
4553
4554 default:
4555 break;
4556 }
4557
4558 /* Check for converting to a pointer to an unconstrained array. */
4559 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4560 return convert_to_fat_pointer (type, expr);
4561
4562 /* If we are converting between two aggregate or vector types that are mere
4563 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4564 to a vector type from its representative array type. */
4565 else if ((code == ecode
4566 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4567 && gnat_types_compatible_p (type, etype))
4568 || (code == VECTOR_TYPE
4569 && ecode == ARRAY_TYPE
4570 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4571 etype)))
4572 return build1 (VIEW_CONVERT_EXPR, type, expr);
4573
4574 /* If we are converting between tagged types, try to upcast properly. */
4575 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4576 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4577 {
4578 tree child_etype = etype;
4579 do {
4580 tree field = TYPE_FIELDS (child_etype);
4581 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4582 return build_component_ref (expr, field, false);
4583 child_etype = TREE_TYPE (field);
4584 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4585 }
4586
4587 /* If we are converting from a smaller form of record type back to it, just
4588 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4589 size on both sides. */
4590 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4591 && smaller_form_type_p (etype, type))
4592 {
4593 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4594 false, false, false, true),
4595 expr);
4596 return build1 (VIEW_CONVERT_EXPR, type, expr);
4597 }
4598
4599 /* In all other cases of related types, make a NOP_EXPR. */
4600 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4601 return fold_convert (type, expr);
4602
4603 switch (code)
4604 {
4605 case VOID_TYPE:
4606 return fold_build1 (CONVERT_EXPR, type, expr);
4607
4608 case INTEGER_TYPE:
4609 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4610 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4611 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4612 return unchecked_convert (type, expr, false);
4613
4614 /* If the output is a biased type, convert first to the base type and
4615 subtract the bias. Note that the bias itself must go through a full
4616 conversion to the base type, lest it is a biased value; this happens
4617 for subtypes of biased types. */
4618 if (TYPE_BIASED_REPRESENTATION_P (type))
4619 return fold_convert (type,
4620 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4621 convert (TREE_TYPE (type), expr),
4622 convert (TREE_TYPE (type),
4623 TYPE_MIN_VALUE (type))));
4624
4625 /* ... fall through ... */
4626
4627 case ENUMERAL_TYPE:
4628 case BOOLEAN_TYPE:
4629 /* If we are converting an additive expression to an integer type
4630 with lower precision, be wary of the optimization that can be
4631 applied by convert_to_integer. There are 2 problematic cases:
4632 - if the first operand was originally of a biased type,
4633 because we could be recursively called to convert it
4634 to an intermediate type and thus rematerialize the
4635 additive operator endlessly,
4636 - if the expression contains a placeholder, because an
4637 intermediate conversion that changes the sign could
4638 be inserted and thus introduce an artificial overflow
4639 at compile time when the placeholder is substituted. */
4640 if (code == INTEGER_TYPE
4641 && ecode == INTEGER_TYPE
4642 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4643 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4644 {
4645 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4646
4647 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4648 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4649 || CONTAINS_PLACEHOLDER_P (expr))
4650 return build1 (NOP_EXPR, type, expr);
4651 }
4652
4653 return fold (convert_to_integer (type, expr));
4654
4655 case POINTER_TYPE:
4656 case REFERENCE_TYPE:
4657 /* If converting between two thin pointers, adjust if needed to account
4658 for differing offsets from the base pointer, depending on whether
4659 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
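/* A sketch of the convention assumed here (our reading, not spelled out
in the sources): when TYPE_UNCONSTRAINED_ARRAY is set on the designated
record type, the thin pointer designates the array data located after
the bounds template, and otherwise it designates the start of the
record; BYTE_DIFF below reconciles the two conventions. */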
4660 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4661 {
4662 tree etype_pos
4663 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4664 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4665 : size_zero_node;
4666 tree type_pos
4667 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4668 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4669 : size_zero_node;
4670 tree byte_diff = size_diffop (type_pos, etype_pos);
4671
4672 expr = build1 (NOP_EXPR, type, expr);
4673 if (integer_zerop (byte_diff))
4674 return expr;
4675
4676 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4677 fold_convert (sizetype, byte_diff));
4678 }
4679
4680 /* If converting a fat pointer to a normal or thin pointer, get the
4681 pointer to the array and then convert it. */
4682 if (TYPE_IS_FAT_POINTER_P (etype))
4683 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4684
4685 return fold (convert_to_pointer (type, expr));
4686
4687 case REAL_TYPE:
4688 return fold (convert_to_real (type, expr));
4689
4690 case RECORD_TYPE:
4691 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4692 {
4693 vec<constructor_elt, va_gc> *v;
4694 vec_alloc (v, 1);
4695
4696 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4697 convert (TREE_TYPE (TYPE_FIELDS (type)),
4698 expr));
4699 return gnat_build_constructor (type, v);
4700 }
4701
4702 /* ... fall through ... */
4703
4704 case ARRAY_TYPE:
4705 /* In these cases, assume the front-end has validated the conversion.
4706 If the conversion is valid, it will be a bit-wise conversion, so
4707 it can be viewed as an unchecked conversion. */
4708 return unchecked_convert (type, expr, false);
4709
4710 case UNION_TYPE:
4711 /* This is either a conversion between a tagged type and some
4712 subtype, which we have to mark as a UNION_TYPE because of
4713 overlapping fields, or a conversion of an Unchecked_Union. */
4714 return unchecked_convert (type, expr, false);
4715
4716 case UNCONSTRAINED_ARRAY_TYPE:
4717 /* If the input is a VECTOR_TYPE, convert to the representative
4718 array type first. */
4719 if (ecode == VECTOR_TYPE)
4720 {
4721 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4722 etype = TREE_TYPE (expr);
4723 ecode = TREE_CODE (etype);
4724 }
4725
4726 /* If EXPR is a constrained array, take its address, convert it to a
4727 fat pointer, and then dereference it. Likewise if EXPR is a
4728 record containing both a template and a constrained array.
4729 Note that a record representing a justified modular type
4730 always represents a packed constrained array. */
4731 if (ecode == ARRAY_TYPE
4732 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4733 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4734 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4735 return
4736 build_unary_op
4737 (INDIRECT_REF, NULL_TREE,
4738 convert_to_fat_pointer (TREE_TYPE (type),
4739 build_unary_op (ADDR_EXPR,
4740 NULL_TREE, expr)));
4741
4742 /* Do something very similar for converting one unconstrained
4743 array to another. */
4744 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4745 return
4746 build_unary_op (INDIRECT_REF, NULL_TREE,
4747 convert (TREE_TYPE (type),
4748 build_unary_op (ADDR_EXPR,
4749 NULL_TREE, expr)));
4750 else
4751 gcc_unreachable ();
4752
4753 case COMPLEX_TYPE:
4754 return fold (convert_to_complex (type, expr));
4755
4756 default:
4757 gcc_unreachable ();
4758 }
4759 }
4760
4761 /* Create an expression whose value is that of EXPR converted to the common
4762 index type, which is sizetype. EXPR is supposed to be in the base type
4763 of the GNAT index type. Calling it is equivalent to doing
4764
4765 convert (sizetype, expr)
4766
4767 but we try to distribute the type conversion with the knowledge that EXPR
4768 cannot overflow in its type. This is a best-effort approach and we fall
4769 back to the above expression as soon as difficulties are encountered.
4770
4771 This is necessary to overcome issues that arise when the GNAT base index
4772 type and the GCC common index type (sizetype) don't have the same size,
4773 which is quite frequent on 64-bit architectures. In this case, and if
4774 the GNAT base index type is signed but the iteration type of the loop has
4775 been forced to unsigned, the loop scalar evolution engine cannot compute
4776 a simple evolution for the general induction variables associated with the
4777 array indices, because it will preserve the wrap-around semantics in the
4778 unsigned type of their "inner" part. As a result, many loop optimizations
4779 are blocked.
4780
4781 The solution is to use a special (basic) induction variable that is at
4782 least as large as sizetype, and to express the aforementioned general
4783 induction variables in terms of this induction variable, eliminating
4784 the problematic intermediate truncation to the GNAT base index type.
4785 This is possible as long as the original expression doesn't overflow
4786 and if the middle-end hasn't introduced artificial overflows in the
4787 course of the various simplifications it can make to the expression. */
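/* For instance, with a hypothetical loop

for J in 1 .. N loop
A (J) := 0;
end loop;

on a 64-bit target the address computation for A (J) would otherwise
truncate the induction variable to the narrower base index type before
widening it to sizetype; replacing J with the special induction variable
removes the truncation and lets scalar evolution analyze the access. */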
4788
4789 tree
4790 convert_to_index_type (tree expr)
4791 {
4792 enum tree_code code = TREE_CODE (expr);
4793 tree type = TREE_TYPE (expr);
4794
4795 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4796 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4797 if (TYPE_UNSIGNED (type) || !optimize)
4798 return convert (sizetype, expr);
4799
4800 switch (code)
4801 {
4802 case VAR_DECL:
4803 /* The main effect of the function: replace a loop parameter with its
4804 associated special induction variable. */
4805 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4806 expr = DECL_INDUCTION_VAR (expr);
4807 break;
4808
4809 CASE_CONVERT:
4810 {
4811 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4812 /* Bail out as soon as we suspect some sort of type frobbing. */
4813 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4814 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4815 break;
4816 }
4817
4818 /* ... fall through ... */
4819
4820 case NON_LVALUE_EXPR:
4821 return fold_build1 (code, sizetype,
4822 convert_to_index_type (TREE_OPERAND (expr, 0)));
4823
4824 case PLUS_EXPR:
4825 case MINUS_EXPR:
4826 case MULT_EXPR:
4827 return fold_build2 (code, sizetype,
4828 convert_to_index_type (TREE_OPERAND (expr, 0)),
4829 convert_to_index_type (TREE_OPERAND (expr, 1)));
4830
4831 case COMPOUND_EXPR:
4832 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4833 convert_to_index_type (TREE_OPERAND (expr, 1)));
4834
4835 case COND_EXPR:
4836 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4837 convert_to_index_type (TREE_OPERAND (expr, 1)),
4838 convert_to_index_type (TREE_OPERAND (expr, 2)));
4839
4840 default:
4841 break;
4842 }
4843
4844 return convert (sizetype, expr);
4845 }
4846 \f
4847 /* Remove all conversions that are done in EXP. This includes converting
4848 from a padded type or to a justified modular type. If TRUE_ADDRESS
4849 is true, always return the address of the containing object even if
4850 the address is not bit-aligned. */
4851
4852 tree
4853 remove_conversions (tree exp, bool true_address)
4854 {
4855 switch (TREE_CODE (exp))
4856 {
4857 case CONSTRUCTOR:
4858 if (true_address
4859 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4860 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4861 return
4862 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4863 break;
4864
4865 case COMPONENT_REF:
4866 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4867 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4868 break;
4869
4870 CASE_CONVERT:
4871 case VIEW_CONVERT_EXPR:
4872 case NON_LVALUE_EXPR:
4873 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4874
4875 default:
4876 break;
4877 }
4878
4879 return exp;
4880 }
4881 \f
4882 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4883 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4884 likewise return an expression pointing to the underlying array. */
4885
4886 tree
4887 maybe_unconstrained_array (tree exp)
4888 {
4889 enum tree_code code = TREE_CODE (exp);
4890 tree type = TREE_TYPE (exp);
4891
4892 switch (TREE_CODE (type))
4893 {
4894 case UNCONSTRAINED_ARRAY_TYPE:
4895 if (code == UNCONSTRAINED_ARRAY_REF)
4896 {
4897 const bool read_only = TREE_READONLY (exp);
4898 const bool no_trap = TREE_THIS_NOTRAP (exp);
4899
4900 exp = TREE_OPERAND (exp, 0);
4901 type = TREE_TYPE (exp);
4902
4903 if (TREE_CODE (exp) == COND_EXPR)
4904 {
4905 tree op1
4906 = build_unary_op (INDIRECT_REF, NULL_TREE,
4907 build_component_ref (TREE_OPERAND (exp, 1),
4908 TYPE_FIELDS (type),
4909 false));
4910 tree op2
4911 = build_unary_op (INDIRECT_REF, NULL_TREE,
4912 build_component_ref (TREE_OPERAND (exp, 2),
4913 TYPE_FIELDS (type),
4914 false));
4915
4916 exp = build3 (COND_EXPR,
4917 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4918 TREE_OPERAND (exp, 0), op1, op2);
4919 }
4920 else
4921 {
4922 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4923 build_component_ref (exp,
4924 TYPE_FIELDS (type),
4925 false));
4926 TREE_READONLY (exp) = read_only;
4927 TREE_THIS_NOTRAP (exp) = no_trap;
4928 }
4929 }
4930
4931 else if (code == NULL_EXPR)
4932 exp = build1 (NULL_EXPR,
4933 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4934 TREE_OPERAND (exp, 0));
4935 break;
4936
4937 case RECORD_TYPE:
4938 /* If this is a padded type and it contains a template, convert to the
4939 unpadded type first. */
4940 if (TYPE_PADDING_P (type)
4941 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4942 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4943 {
4944 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4945 code = TREE_CODE (exp);
4946 type = TREE_TYPE (exp);
4947 }
4948
4949 if (TYPE_CONTAINS_TEMPLATE_P (type))
4950 {
4951 /* If the array initializer is a box, return NULL_TREE. */
4952 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
4953 return NULL_TREE;
4954
4955 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
4956 false);
4957 type = TREE_TYPE (exp);
4958
4959 /* If the array type is padded, convert to the unpadded type. */
4960 if (TYPE_IS_PADDING_P (type))
4961 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4962 }
4963 break;
4964
4965 default:
4966 break;
4967 }
4968
4969 return exp;
4970 }
4971 \f
4972 /* Return true if EXPR is an expression that can be folded as an operand
4973 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4974
4975 static bool
4976 can_fold_for_view_convert_p (tree expr)
4977 {
4978 tree t1, t2;
4979
4980 /* The folder will fold NOP_EXPRs between integral types with the same
4981 precision (in the middle-end's sense). We cannot allow it if the
4982 types don't have the same precision in the Ada sense as well. */
4983 if (TREE_CODE (expr) != NOP_EXPR)
4984 return true;
4985
4986 t1 = TREE_TYPE (expr);
4987 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4988
4989 /* Defer to the folder for non-integral conversions. */
4990 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4991 return true;
4992
4993 /* Only fold conversions that preserve both precisions. */
4994 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4995 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4996 return true;
4997
4998 return false;
4999 }
5000
5001 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5002 If NOTRUNC_P is true, truncation operations should be suppressed.
5003
5004 Special care is required with (source or target) integral types whose
5005 precision is not equal to their size, to make sure we fetch or assign
5006 the value bits whose location might depend on the endianness, e.g.
5007
5008 Rmsize : constant := 8;
5009 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5010
5011 type Bit_Array is array (1 .. Rmsize) of Boolean;
5012 pragma Pack (Bit_Array);
5013
5014 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5015
5016 Value : Int := 2#1000_0001#;
5017 Vbits : Bit_Array := To_Bit_Array (Value);
5018
5019 we expect the 8 bits at Vbits'Address to always contain Value, while
5020 their original location depends on the endianness, at Value'Address
5021 on a little-endian architecture but not on a big-endian one.
5022
5023 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5024 the bits between the precision and the size are filled, because of the
5025 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5026 So we use the special predicate type_unsigned_for_rm above. */
5027
5028 tree
5029 unchecked_convert (tree type, tree expr, bool notrunc_p)
5030 {
5031 tree etype = TREE_TYPE (expr);
5032 enum tree_code ecode = TREE_CODE (etype);
5033 enum tree_code code = TREE_CODE (type);
5034 tree tem;
5035 int c;
5036
5037 /* If the expression is already of the right type, we are done. */
5038 if (etype == type)
5039 return expr;
5040
5041 /* If both types are integral, just do a normal conversion.
5042 Likewise for a conversion to an unconstrained array. */
5043 if (((INTEGRAL_TYPE_P (type)
5044 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5045 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5046 && (INTEGRAL_TYPE_P (etype)
5047 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5048 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5049 || code == UNCONSTRAINED_ARRAY_TYPE)
5050 {
5051 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
5052 {
5053 tree ntype = copy_type (etype);
5054 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5055 TYPE_MAIN_VARIANT (ntype) = ntype;
5056 expr = build1 (NOP_EXPR, ntype, expr);
5057 }
5058
5059 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5060 {
5061 tree rtype = copy_type (type);
5062 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5063 TYPE_MAIN_VARIANT (rtype) = rtype;
5064 expr = convert (rtype, expr);
5065 expr = build1 (NOP_EXPR, type, expr);
5066 }
5067 else
5068 expr = convert (type, expr);
5069 }
5070
5071 /* If we are converting to an integral type whose precision is not equal
5072 to its size, first unchecked convert to a record type that contains a
5073 field of the given precision. Then extract the result from the field.
5074
5075 There is a subtlety if the source type is an aggregate type with reverse
5076 storage order because its representation is not contiguous in the native
5077 storage order, i.e. a direct unchecked conversion to an integral type
5078 with N bits of precision cannot read the first N bits of the aggregate
5079 type. To overcome it, we do an unchecked conversion to an integral type
5080 with reverse storage order and return the resulting value. This also
5081 ensures that the result of the unchecked conversion doesn't depend on
5082 the endianness of the target machine, but only on the storage order of
5083 the aggregate type.
5084
5085 Finally, for the sake of consistency, we do the unchecked conversion
5086 to an integral type with reverse storage order as soon as the source
5087 type is an aggregate type with reverse storage order, even if there
5088 are no considerations of precision or size involved. */
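/* Hypothetical illustration: for a record type declared with
Scalar_Storage_Order => High_Order_First, an unchecked conversion to a
32-bit integer goes through an integral type that also carries
TYPE_REVERSE_STORAGE_ORDER, so the value read depends only on the
declared storage order of the record and not on the endianness of the
target machine. */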
5089 else if (INTEGRAL_TYPE_P (type)
5090 && TYPE_RM_SIZE (type)
5091 && (tree_int_cst_compare (TYPE_RM_SIZE (type),
5092 TYPE_SIZE (type)) < 0
5093 || (AGGREGATE_TYPE_P (etype)
5094 && TYPE_REVERSE_STORAGE_ORDER (etype))))
5095 {
5096 tree rec_type = make_node (RECORD_TYPE);
5097 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5098 tree field_type, field;
5099
5100 if (AGGREGATE_TYPE_P (etype))
5101 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5102 = TYPE_REVERSE_STORAGE_ORDER (etype);
5103
5104 if (type_unsigned_for_rm (type))
5105 field_type = make_unsigned_type (prec);
5106 else
5107 field_type = make_signed_type (prec);
5108 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5109
5110 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5111 NULL_TREE, bitsize_zero_node, 1, 0);
5112
5113 finish_record_type (rec_type, field, 1, false);
5114
5115 expr = unchecked_convert (rec_type, expr, notrunc_p);
5116 expr = build_component_ref (expr, field, false);
5117 expr = fold_build1 (NOP_EXPR, type, expr);
5118 }
5119
5120 /* Similarly if we are converting from an integral type whose precision is
5121 not equal to its size, first copy into a field of the given precision
5122 and unchecked convert the record type.
5123
5124 The same considerations as above apply if the target type is an aggregate
5125 type with reverse storage order and we also proceed similarly. */
5126 else if (INTEGRAL_TYPE_P (etype)
5127 && TYPE_RM_SIZE (etype)
5128 && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
5129 TYPE_SIZE (etype)) < 0
5130 || (AGGREGATE_TYPE_P (type)
5131 && TYPE_REVERSE_STORAGE_ORDER (type))))
5132 {
5133 tree rec_type = make_node (RECORD_TYPE);
5134 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5135 vec<constructor_elt, va_gc> *v;
5136 vec_alloc (v, 1);
5137 tree field_type, field;
5138
5139 if (AGGREGATE_TYPE_P (type))
5140 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5141 = TYPE_REVERSE_STORAGE_ORDER (type);
5142
5143 if (type_unsigned_for_rm (etype))
5144 field_type = make_unsigned_type (prec);
5145 else
5146 field_type = make_signed_type (prec);
5147 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5148
5149 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5150 NULL_TREE, bitsize_zero_node, 1, 0);
5151
5152 finish_record_type (rec_type, field, 1, false);
5153
5154 expr = fold_build1 (NOP_EXPR, field_type, expr);
5155 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5156 expr = gnat_build_constructor (rec_type, v);
5157 expr = unchecked_convert (type, expr, notrunc_p);
5158 }
5159
5160 /* If we are converting from a scalar type to a type with a different size,
5161 we need to pad to have the same size on both sides.
5162
5163 ??? We cannot do it unconditionally because unchecked conversions are
5164 used liberally by the front-end to implement polymorphism, e.g. in:
5165
5166 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5167 return p___size__4 (p__object!(S191s.all));
5168
5169 so we skip all expressions that are references. */
5170 else if (!REFERENCE_CLASS_P (expr)
5171 && !AGGREGATE_TYPE_P (etype)
5172 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5173 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5174 {
5175 if (c < 0)
5176 {
5177 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5178 false, false, false, true),
5179 expr);
5180 expr = unchecked_convert (type, expr, notrunc_p);
5181 }
5182 else
5183 {
5184 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5185 false, false, false, true);
5186 expr = unchecked_convert (rec_type, expr, notrunc_p);
5187 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5188 }
5189 }
5190
5191 /* We have a special case when we are converting between two unconstrained
5192 array types. In that case, take the address, convert the fat pointer
5193 types, and dereference. */
5194 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5195 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5196 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5197 build_unary_op (ADDR_EXPR, NULL_TREE,
5198 expr)));
5199
5200 /* Another special case is when we are converting to a vector type from its
5201 representative array type; this is a regular conversion. */
5202 else if (code == VECTOR_TYPE
5203 && ecode == ARRAY_TYPE
5204 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5205 etype))
5206 expr = convert (type, expr);
5207
5208 /* And, if the array type is not the representative, we try to build an
5209 intermediate vector type of which the array type is the representative
5210 and to do the unchecked conversion between the vector types, in order
5211 to enable further simplifications in the middle-end. */
5212 else if (code == VECTOR_TYPE
5213 && ecode == ARRAY_TYPE
5214 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5215 {
5216 expr = convert (tem, expr);
5217 return unchecked_convert (type, expr, notrunc_p);
5218 }
5219
5220 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5221 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5222 else if (TREE_CODE (expr) == CONSTRUCTOR
5223 && code == RECORD_TYPE
5224 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5225 {
5226 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5227 Empty, false, false, false, true),
5228 expr);
5229 return unchecked_convert (type, expr, notrunc_p);
5230 }
5231
5232 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5233 else
5234 {
5235 expr = maybe_unconstrained_array (expr);
5236 etype = TREE_TYPE (expr);
5237 ecode = TREE_CODE (etype);
5238 if (can_fold_for_view_convert_p (expr))
5239 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5240 else
5241 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5242 }
5243
5244 /* If the result is a non-biased integral type whose precision is not equal
5245 to its size, sign- or zero-extend the result. But we need not do this
5246 if the input is also an integral type and both are unsigned or both are
5247 signed and have the same precision. */
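/* As an illustration of the shifts built below: for a type whose size is
8 bits and whose RM size is 3, the extension amounts to

expr = (expr << 5) >> 5;

carried out in a signed base type to sign-extend, or in an unsigned one
to zero-extend, as selected by type_unsigned_for_rm. */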
5248 if (!notrunc_p
5249 && INTEGRAL_TYPE_P (type)
5250 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5251 && TYPE_RM_SIZE (type)
5252 && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
5253 && !(INTEGRAL_TYPE_P (etype)
5254 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5255 && (type_unsigned_for_rm (type)
5256 || tree_int_cst_compare (TYPE_RM_SIZE (type),
5257 TYPE_RM_SIZE (etype)
5258 ? TYPE_RM_SIZE (etype)
5259 : TYPE_SIZE (etype)) == 0)))
5260 {
5261 if (integer_zerop (TYPE_RM_SIZE (type)))
5262 expr = build_int_cst (type, 0);
5263 else
5264 {
5265 tree base_type
5266 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5267 type_unsigned_for_rm (type));
5268 tree shift_expr
5269 = convert (base_type,
5270 size_binop (MINUS_EXPR,
5271 TYPE_SIZE (type), TYPE_RM_SIZE (type)));
5272 expr
5273 = convert (type,
5274 build_binary_op (RSHIFT_EXPR, base_type,
5275 build_binary_op (LSHIFT_EXPR, base_type,
5276 convert (base_type,
5277 expr),
5278 shift_expr),
5279 shift_expr));
5280 }
5281 }
5282
5283 /* An unchecked conversion should never raise Constraint_Error. The code
5284 below assumes that GCC's conversion routines overflow the same way that
5285 the underlying hardware does. This is probably true. In the rare case
5286 when it is false, we can rely on the fact that such conversions are
5287 erroneous anyway. */
5288 if (TREE_CODE (expr) == INTEGER_CST)
5289 TREE_OVERFLOW (expr) = 0;
5290
5291 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5292 mark the expression as no longer constant. */
5293 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5294 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5295 OEP_ONLY_CONST))
5296 TREE_CONSTANT (expr) = 0;
5297
5298 return expr;
5299 }
5300 \f
5301 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5302 the latter being a record type as predicated by Is_Record_Type. */
5303
5304 enum tree_code
5305 tree_code_for_record_type (Entity_Id gnat_type)
5306 {
5307 Node_Id component_list, component;
5308
5309 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5310 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5311 if (!Is_Unchecked_Union (gnat_type))
5312 return RECORD_TYPE;
5313
5314 gnat_type = Implementation_Base_Type (gnat_type);
5315 component_list
5316 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5317
5318 for (component = First_Non_Pragma (Component_Items (component_list));
5319 Present (component);
5320 component = Next_Non_Pragma (component))
5321 if (Ekind (Defining_Entity (component)) == E_Component)
5322 return RECORD_TYPE;
5323
5324 return UNION_TYPE;
5325 }
5326
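/* As a hypothetical illustration of the above:

type UU (Flag : Boolean := False) is record
case Flag is
when False => I : Integer;
when True => F : Float;
end case;
end record;
pragma Unchecked_Union (UU);

has no component outside the variant part and thus maps to UNION_TYPE,
whereas declaring a component before the case part would yield
RECORD_TYPE. */
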
5327 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5328 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5329 according to the presence of an alignment clause on the type or, if it
5330 is an array, on the component type. */
5331
5332 bool
5333 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5334 {
5335 gnat_type = Underlying_Type (gnat_type);
5336
5337 *align_clause = Present (Alignment_Clause (gnat_type));
5338
5339 if (Is_Array_Type (gnat_type))
5340 {
5341 gnat_type = Underlying_Type (Component_Type (gnat_type));
5342 if (Present (Alignment_Clause (gnat_type)))
5343 *align_clause = true;
5344 }
5345
5346 if (!Is_Floating_Point_Type (gnat_type))
5347 return false;
5348
5349 if (UI_To_Int (Esize (gnat_type)) != 64)
5350 return false;
5351
5352 return true;
5353 }
5354
5355 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5356 size is greater or equal to 64 bits, or an array of such a type. Set
5357 ALIGN_CLAUSE according to the presence of an alignment clause on the
5358 type or, if it is an array, on the component type. */
5359
5360 bool
5361 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5362 {
5363 gnat_type = Underlying_Type (gnat_type);
5364
5365 *align_clause = Present (Alignment_Clause (gnat_type));
5366
5367 if (Is_Array_Type (gnat_type))
5368 {
5369 gnat_type = Underlying_Type (Component_Type (gnat_type));
5370 if (Present (Alignment_Clause (gnat_type)))
5371 *align_clause = true;
5372 }
5373
5374 if (!Is_Scalar_Type (gnat_type))
5375 return false;
5376
5377 if (UI_To_Int (Esize (gnat_type)) < 64)
5378 return false;
5379
5380 return true;
5381 }
5382
5383 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5384 component of an aggregate type. */
5385
5386 bool
5387 type_for_nonaliased_component_p (tree gnu_type)
5388 {
5389 /* If the type is passed by reference, we may have pointers to the
5390 component so it cannot be made non-aliased. */
5391 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5392 return false;
5393
5394 /* We used to say that any component of aggregate type is aliased
5395 because the front-end may take 'Reference of it. The front-end
5396 has been enhanced in the meantime so as to use a renaming instead
5397 in most cases, but the back-end can probably take the address of
5398 such a component too so we go for the conservative stance.
5399
5400 For instance, we might need the address of any array type, even
5401 if normally passed by copy, to construct a fat pointer if the
5402 component is used as an actual for an unconstrained formal.
5403
5404 Likewise for record types: even if a specific record subtype is
5405 passed by copy, the parent type might be passed by ref (e.g. if
5406 it's of variable size) and we might take the address of a child
5407 component to pass to a parent formal. We have no way to check
5408 for such conditions here. */
5409 if (AGGREGATE_TYPE_P (gnu_type))
5410 return false;
5411
5412 return true;
5413 }
5414
5415 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5416
5417 bool
5418 smaller_form_type_p (tree type, tree orig_type)
5419 {
5420 tree size, osize;
5421
5422 /* We're not interested in variants here. */
5423 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5424 return false;
5425
5426 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5427 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5428 return false;
5429
5430 size = TYPE_SIZE (type);
5431 osize = TYPE_SIZE (orig_type);
5432
5433 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5434 return false;
5435
5436 return tree_int_cst_lt (size, osize) != 0;
5437 }
5438
5439 /* Return whether EXPR, which is the renamed object in an object renaming
5440 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5441 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
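/* For instance (hypothetical), given

R : Rec;
X : Integer renames R.F;

the renaming can be materialized as a reference to R.F as long as Rec is
not packed and F starts on a byte boundary; otherwise the function
returns false and the renaming is handled without a materialized
reference. */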
5442
5443 bool
5444 can_materialize_object_renaming_p (Node_Id expr)
5445 {
5446 while (true)
5447 {
5448 expr = Original_Node (expr);
5449
5450 switch (Nkind (expr))
5451 {
5452 case N_Identifier:
5453 case N_Expanded_Name:
5454 if (!Present (Renamed_Object (Entity (expr))))
5455 return true;
5456 expr = Renamed_Object (Entity (expr));
5457 break;
5458
5459 case N_Selected_Component:
5460 {
5461 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5462 return false;
5463
5464 const Uint bitpos
5465 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5466 if (!UI_Is_In_Int_Range (bitpos)
5467 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5468 return false;
5469
5470 expr = Prefix (expr);
5471 break;
5472 }
5473
5474 case N_Indexed_Component:
5475 case N_Slice:
5476 {
5477 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5478
5479 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5480 return false;
5481
5482 expr = Prefix (expr);
5483 break;
5484 }
5485
5486 case N_Explicit_Dereference:
5487 expr = Prefix (expr);
5488 break;
5489
5490 default:
5491 return true;
5492 }
5493 }
5494 }
5495
5496 /* Perform final processing on global declarations. */
5497
5498 static GTY (()) tree dummy_global;
5499
5500 void
5501 gnat_write_global_declarations (void)
5502 {
5503 unsigned int i;
5504 tree iter;
5505
5506 /* If we have declared types as used at the global level, insert them in
5507 the global hash table. We use a dummy variable for this purpose, but
5508 we need to build it unconditionally to avoid -fcompare-debug issues. */
5509 if (first_global_object_name)
5510 {
5511 struct varpool_node *node;
5512 char *label;
5513
5514 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5515 dummy_global
5516 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5517 void_type_node);
5518 DECL_HARD_REGISTER (dummy_global) = 1;
5519 TREE_STATIC (dummy_global) = 1;
5520 node = varpool_node::get_create (dummy_global);
5521 node->definition = 1;
5522 node->force_output = 1;
5523
5524 if (types_used_by_cur_var_decl)
5525 while (!types_used_by_cur_var_decl->is_empty ())
5526 {
5527 tree t = types_used_by_cur_var_decl->pop ();
5528 types_used_by_var_decl_insert (t, dummy_global);
5529 }
5530 }
5531
5532 /* Output debug information for all global type declarations first. This
5533 ensures that global types whose compilation hasn't been finalized yet,
5534 for example pointers to Taft amendment types, have their compilation
5535 finalized in the right context. */
5536 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5537 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5538 debug_hooks->type_decl (iter, false);
5539
5540 /* Output imported functions. */
5541 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5542 if (TREE_CODE (iter) == FUNCTION_DECL
5543 && DECL_EXTERNAL (iter)
5544 && DECL_INITIAL (iter) == NULL
5545 && !DECL_IGNORED_P (iter)
5546 && DECL_FUNCTION_IS_DEF (iter))
5547 debug_hooks->early_global_decl (iter);
5548
5549 /* Then output the global variables. We need to do that after the debug
5550 information for global types is emitted so that they are finalized. Skip
5551 external global variables, unless we need to emit debug info for them:
5552 this is useful for imported variables, for instance. */
5553 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5554 if (TREE_CODE (iter) == VAR_DECL
5555 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5556 rest_of_decl_compilation (iter, true, 0);
5557
5558 /* Output the imported modules/declarations. In GNAT, these only
5559 materialize subprograms. */
5560 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5561 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5562 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5563 DECL_CONTEXT (iter), false, false);
5564 }
5565
5566 /* ************************************************************************
5567 * * GCC builtins support *
5568 * ************************************************************************ */
5569
5570 /* The general scheme is fairly simple:
5571
5572 For each builtin function/type to be declared, gnat_install_builtins calls
5573 internal facilities which eventually get to gnat_pushdecl, which in turn
5574 tracks the builtin function decls thus declared in the 'builtin_decls'
5575 global data structure. When an Intrinsic subprogram declaration is
5576 processed, we search this global data structure to retrieve the
5577 associated BUILT_IN DECL node. */
5578
5579 /* Search the chain of currently available builtin declarations for a node
5580 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5581 found, if any, or NULL_TREE otherwise. */
5582 tree
5583 builtin_decl_for (tree name)
5584 {
5585 unsigned i;
5586 tree decl;
5587
5588 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5589 if (DECL_NAME (decl) == name)
5590 return decl;
5591
5592 return NULL_TREE;
5593 }
5594
5595 /* The code below eventually exposes gnat_install_builtins, which declares
5596 the builtin types and functions we might need, either internally or as
5597 user accessible facilities.
5598
5599 ??? This is a first implementation shot, still in rough shape. It is
5600 heavily inspired by the "C" family implementation, with chunks copied
5601 verbatim from there.
5602
5603 Two obvious improvement candidates are:
5604 o Use a more efficient name/decl mapping scheme
5605 o Devise a middle-end infrastructure to avoid having to copy
5606 pieces between front-ends. */
5607
5608 /* ----------------------------------------------------------------------- *
5609 * BUILTIN ELEMENTARY TYPES *
5610 * ----------------------------------------------------------------------- */
5611
5612 /* Standard data types to be used in builtin argument declarations. */
5613
5614 enum c_tree_index
5615 {
5616 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5617 CTI_STRING_TYPE,
5618 CTI_CONST_STRING_TYPE,
5619
5620 CTI_MAX
5621 };
5622
5623 static tree c_global_trees[CTI_MAX];
5624
5625 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5626 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5627 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5628
5629 /* ??? In addition to some attribute handlers, we currently don't support a
5630 (small) number of builtin types, which in turn inhibits support for a
5631 number of builtin functions. */
5632 #define wint_type_node void_type_node
5633 #define intmax_type_node void_type_node
5634 #define uintmax_type_node void_type_node
5635
5636 /* Used to help initialize the builtin-types.def table. When a type of
5637 the correct size doesn't exist, use error_mark_node instead of NULL.
5638 The latter results in segfaults even when a decl using the type doesn't
5639 get invoked. */
5640
5641 static tree
5642 builtin_type_for_size (int size, bool unsignedp)
5643 {
5644 tree type = gnat_type_for_size (size, unsignedp);
5645 return type ? type : error_mark_node;
5646 }
5647
5648 /* Build/push the elementary type decls that builtin functions/types
5649 will need. */
5650
5651 static void
5652 install_builtin_elementary_types (void)
5653 {
5654 signed_size_type_node = gnat_signed_type_for (size_type_node);
5655 pid_type_node = integer_type_node;
5656
5657 string_type_node = build_pointer_type (char_type_node);
5658 const_string_type_node
5659 = build_pointer_type (build_qualified_type
5660 (char_type_node, TYPE_QUAL_CONST));
5661 }
5662
5663 /* ----------------------------------------------------------------------- *
5664 * BUILTIN FUNCTION TYPES *
5665 * ----------------------------------------------------------------------- */
5666
5667 /* Now, builtin function types per se. */
5668
5669 enum c_builtin_type
5670 {
5671 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5672 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5673 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5674 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5675 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5676 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5677 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5678 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5679 ARG6) NAME,
5680 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5681 ARG6, ARG7) NAME,
5682 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5683 ARG6, ARG7, ARG8) NAME,
5684 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5685 ARG6, ARG7, ARG8, ARG9) NAME,
5686 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5687 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5688 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5689 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5690 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5691 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5692 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5693 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5694 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5695 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5696 NAME,
5697 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5698 ARG6) NAME,
5699 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5700 ARG6, ARG7) NAME,
5701 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5702 #include "builtin-types.def"
5703 #undef DEF_PRIMITIVE_TYPE
5704 #undef DEF_FUNCTION_TYPE_0
5705 #undef DEF_FUNCTION_TYPE_1
5706 #undef DEF_FUNCTION_TYPE_2
5707 #undef DEF_FUNCTION_TYPE_3
5708 #undef DEF_FUNCTION_TYPE_4
5709 #undef DEF_FUNCTION_TYPE_5
5710 #undef DEF_FUNCTION_TYPE_6
5711 #undef DEF_FUNCTION_TYPE_7
5712 #undef DEF_FUNCTION_TYPE_8
5713 #undef DEF_FUNCTION_TYPE_9
5714 #undef DEF_FUNCTION_TYPE_10
5715 #undef DEF_FUNCTION_TYPE_11
5716 #undef DEF_FUNCTION_TYPE_VAR_0
5717 #undef DEF_FUNCTION_TYPE_VAR_1
5718 #undef DEF_FUNCTION_TYPE_VAR_2
5719 #undef DEF_FUNCTION_TYPE_VAR_3
5720 #undef DEF_FUNCTION_TYPE_VAR_4
5721 #undef DEF_FUNCTION_TYPE_VAR_5
5722 #undef DEF_FUNCTION_TYPE_VAR_6
5723 #undef DEF_FUNCTION_TYPE_VAR_7
5724 #undef DEF_POINTER_TYPE
5725 BT_LAST
5726 };
5727
5728 typedef enum c_builtin_type builtin_type;
5729
5730 /* A temporary array used in communication with def_fn_type. */
5731 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5732
5733 /* A helper function for install_builtin_function_types. Build function type
5734 for DEF with return type RET and N arguments. If VAR is true, then the
5735 function should be variadic after those N arguments.
5736
5737 Takes special care not to ICE if any of the types involved are
5738 error_mark_node, which indicates that said type is not in fact available
5739 (see builtin_type_for_size), in which case the function type as a whole
5740 should be error_mark_node. */
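/* A typical expansion, shown here for illustration only:

def_fn_type (BT_FN_INT_INT, BT_INT, 0, 1, BT_INT);

builds the function type "int f (int)" and stores it in
builtin_types[BT_FN_INT_INT]. */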
5741
5742 static void
5743 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5744 {
5745 tree t;
5746 tree *args = XALLOCAVEC (tree, n);
5747 va_list list;
5748 int i;
5749
5750 va_start (list, n);
5751 for (i = 0; i < n; ++i)
5752 {
5753 builtin_type a = (builtin_type) va_arg (list, int);
5754 t = builtin_types[a];
5755 if (t == error_mark_node)
5756 goto egress;
5757 args[i] = t;
5758 }
5759
5760 t = builtin_types[ret];
5761 if (t == error_mark_node)
5762 goto egress;
5763 if (var)
5764 t = build_varargs_function_type_array (t, n, args);
5765 else
5766 t = build_function_type_array (t, n, args);
5767
5768 egress:
5769 builtin_types[def] = t;
5770 va_end (list);
5771 }
5772
5773 /* Build the builtin function types and install them in the builtin_types
5774 array for later use in builtin function decls. */
5775
5776 static void
5777 install_builtin_function_types (void)
5778 {
5779 tree va_list_ref_type_node;
5780 tree va_list_arg_type_node;
5781
5782 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5783 {
5784 va_list_arg_type_node = va_list_ref_type_node =
5785 build_pointer_type (TREE_TYPE (va_list_type_node));
5786 }
5787 else
5788 {
5789 va_list_arg_type_node = va_list_type_node;
5790 va_list_ref_type_node = build_reference_type (va_list_type_node);
5791 }
5792
5793 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5794 builtin_types[ENUM] = VALUE;
5795 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5796 def_fn_type (ENUM, RETURN, 0, 0);
5797 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5798 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5799 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5800 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5801 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5802 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5803 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5804 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5805 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5806 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5807 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5808 ARG6) \
5809 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5810 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5811 ARG6, ARG7) \
5812 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5813 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5814 ARG6, ARG7, ARG8) \
5815 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5816 ARG7, ARG8);
5817 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5818 ARG6, ARG7, ARG8, ARG9) \
5819 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5820 ARG7, ARG8, ARG9);
5821 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5822 ARG6, ARG7, ARG8, ARG9, ARG10) \
5823 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5824 ARG7, ARG8, ARG9, ARG10);
5825 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5826 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5827 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5828 ARG7, ARG8, ARG9, ARG10, ARG11);
5829 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5830 def_fn_type (ENUM, RETURN, 1, 0);
5831 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5832 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5833 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5834 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5835 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5836 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5837 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5838 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5839 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5840 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5841 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5842 ARG6) \
5843 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5844 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5845 ARG6, ARG7) \
5846 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5847 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5848 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5849
5850 #include "builtin-types.def"
5851
5852 #undef DEF_PRIMITIVE_TYPE
5853 #undef DEF_FUNCTION_TYPE_0
5854 #undef DEF_FUNCTION_TYPE_1
5855 #undef DEF_FUNCTION_TYPE_2
5856 #undef DEF_FUNCTION_TYPE_3
5857 #undef DEF_FUNCTION_TYPE_4
5858 #undef DEF_FUNCTION_TYPE_5
5859 #undef DEF_FUNCTION_TYPE_6
5860 #undef DEF_FUNCTION_TYPE_7
5861 #undef DEF_FUNCTION_TYPE_8
5862 #undef DEF_FUNCTION_TYPE_9
5863 #undef DEF_FUNCTION_TYPE_10
5864 #undef DEF_FUNCTION_TYPE_11
5865 #undef DEF_FUNCTION_TYPE_VAR_0
5866 #undef DEF_FUNCTION_TYPE_VAR_1
5867 #undef DEF_FUNCTION_TYPE_VAR_2
5868 #undef DEF_FUNCTION_TYPE_VAR_3
5869 #undef DEF_FUNCTION_TYPE_VAR_4
5870 #undef DEF_FUNCTION_TYPE_VAR_5
5871 #undef DEF_FUNCTION_TYPE_VAR_6
5872 #undef DEF_FUNCTION_TYPE_VAR_7
5873 #undef DEF_POINTER_TYPE
5874 builtin_types[(int) BT_LAST] = NULL_TREE;
5875 }
5876
5877 /* ----------------------------------------------------------------------- *
5878 * BUILTIN ATTRIBUTES *
5879 * ----------------------------------------------------------------------- */
5880
5881 enum built_in_attribute
5882 {
5883 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5884 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5885 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5886 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5887 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5888 #include "builtin-attrs.def"
5889 #undef DEF_ATTR_NULL_TREE
5890 #undef DEF_ATTR_INT
5891 #undef DEF_ATTR_STRING
5892 #undef DEF_ATTR_IDENT
5893 #undef DEF_ATTR_TREE_LIST
5894 ATTR_LAST
5895 };
5896
5897 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5898
5899 static void
5900 install_builtin_attributes (void)
5901 {
5902 /* Fill in the built_in_attributes array. */
5903 #define DEF_ATTR_NULL_TREE(ENUM) \
5904 built_in_attributes[(int) ENUM] = NULL_TREE;
5905 #define DEF_ATTR_INT(ENUM, VALUE) \
5906 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5907 #define DEF_ATTR_STRING(ENUM, VALUE) \
5908 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5909 #define DEF_ATTR_IDENT(ENUM, STRING) \
5910 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5911 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5912 built_in_attributes[(int) ENUM] \
5913 = tree_cons (built_in_attributes[(int) PURPOSE], \
5914 built_in_attributes[(int) VALUE], \
5915 built_in_attributes[(int) CHAIN]);
5916 #include "builtin-attrs.def"
5917 #undef DEF_ATTR_NULL_TREE
5918 #undef DEF_ATTR_INT
5919 #undef DEF_ATTR_STRING
5920 #undef DEF_ATTR_IDENT
5921 #undef DEF_ATTR_TREE_LIST
5922 }
5923
5924 /* Handle a "const" attribute; arguments as in
5925 struct attribute_spec.handler. */
5926
5927 static tree
5928 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5929 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5930 bool *no_add_attrs)
5931 {
5932 if (TREE_CODE (*node) == FUNCTION_DECL)
5933 TREE_READONLY (*node) = 1;
5934 else
5935 *no_add_attrs = true;
5936
5937 return NULL_TREE;
5938 }
5939
5940 /* Handle a "nothrow" attribute; arguments as in
5941 struct attribute_spec.handler. */
5942
5943 static tree
5944 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5945 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5946 bool *no_add_attrs)
5947 {
5948 if (TREE_CODE (*node) == FUNCTION_DECL)
5949 TREE_NOTHROW (*node) = 1;
5950 else
5951 *no_add_attrs = true;
5952
5953 return NULL_TREE;
5954 }
5955
5956 /* Handle a "pure" attribute; arguments as in
5957 struct attribute_spec.handler. */
5958
5959 static tree
5960 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5961 int ARG_UNUSED (flags), bool *no_add_attrs)
5962 {
5963 if (TREE_CODE (*node) == FUNCTION_DECL)
5964 DECL_PURE_P (*node) = 1;
5965 /* TODO: support types. */
5966 else
5967 {
5968 warning (OPT_Wattributes, "%qs attribute ignored",
5969 IDENTIFIER_POINTER (name));
5970 *no_add_attrs = true;
5971 }
5972
5973 return NULL_TREE;
5974 }
5975
5976 /* Handle a "no vops" attribute; arguments as in
5977 struct attribute_spec.handler. */
5978
5979 static tree
5980 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5981 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5982 bool *ARG_UNUSED (no_add_attrs))
5983 {
5984 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5985 DECL_IS_NOVOPS (*node) = 1;
5986 return NULL_TREE;
5987 }
5988
5989 /* Helper for nonnull attribute handling; fetch the operand number
5990 from the attribute argument list. */
5991
5992 static bool
5993 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5994 {
5995 /* Verify the arg number is a constant. */
5996 if (!tree_fits_uhwi_p (arg_num_expr))
5997 return false;
5998
5999 *valp = TREE_INT_CST_LOW (arg_num_expr);
6000 return true;
6001 }
6002
6003 /* Handle the "nonnull" attribute. */
6004 static tree
6005 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6006 tree args, int ARG_UNUSED (flags),
6007 bool *no_add_attrs)
6008 {
6009 tree type = *node;
6010 unsigned HOST_WIDE_INT attr_arg_num;
6011
6012 /* If no arguments are specified, all pointer arguments should be
6013 non-null. Verify a full prototype is given so that the arguments
6014 will have the correct types when we actually check them later.
6015 Avoid diagnosing type-generic built-ins since those have no
6016 prototype. */
6017 if (!args)
6018 {
6019 if (!prototype_p (type)
6020 && (!TYPE_ATTRIBUTES (type)
6021 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6022 {
6023 error ("nonnull attribute without arguments on a non-prototype");
6024 *no_add_attrs = true;
6025 }
6026 return NULL_TREE;
6027 }
6028
6029 /* Argument list specified. Verify that each argument number references
6030 a pointer argument. */
6031 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6032 {
6033 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6034
6035 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6036 {
6037 error ("nonnull argument has invalid operand number (argument %lu)",
6038 (unsigned long) attr_arg_num);
6039 *no_add_attrs = true;
6040 return NULL_TREE;
6041 }
6042
6043 if (prototype_p (type))
6044 {
6045 function_args_iterator iter;
6046 tree argument;
6047
6048 function_args_iter_init (&iter, type);
6049 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6050 {
6051 argument = function_args_iter_cond (&iter);
6052 if (!argument || ck_num == arg_num)
6053 break;
6054 }
6055
6056 if (!argument
6057 || TREE_CODE (argument) == VOID_TYPE)
6058 {
6059 error ("nonnull argument with out-of-range operand number "
6060 "(argument %lu, operand %lu)",
6061 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6062 *no_add_attrs = true;
6063 return NULL_TREE;
6064 }
6065
6066 if (TREE_CODE (argument) != POINTER_TYPE)
6067 {
6068 error ("nonnull argument references non-pointer operand "
6069 "(argument %lu, operand %lu)",
6070 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6071 *no_add_attrs = true;
6072 return NULL_TREE;
6073 }
6074 }
6075 }
6076
6077 return NULL_TREE;
6078 }
6079
6080 /* Handle a "sentinel" attribute. */
6081
6082 static tree
6083 handle_sentinel_attribute (tree *node, tree name, tree args,
6084 int ARG_UNUSED (flags), bool *no_add_attrs)
6085 {
6086 if (!prototype_p (*node))
6087 {
6088 warning (OPT_Wattributes,
6089 "%qs attribute requires prototypes with named arguments",
6090 IDENTIFIER_POINTER (name));
6091 *no_add_attrs = true;
6092 }
6093 else
6094 {
6095 if (!stdarg_p (*node))
6096 {
6097 warning (OPT_Wattributes,
6098 "%qs attribute only applies to variadic functions",
6099 IDENTIFIER_POINTER (name));
6100 *no_add_attrs = true;
6101 }
6102 }
6103
6104 if (args)
6105 {
6106 tree position = TREE_VALUE (args);
6107
6108 if (TREE_CODE (position) != INTEGER_CST)
6109 {
6110 warning (0, "requested position is not an integer constant");
6111 *no_add_attrs = true;
6112 }
6113 else
6114 {
6115 if (tree_int_cst_lt (position, integer_zero_node))
6116 {
6117 warning (0, "requested position is less than zero");
6118 *no_add_attrs = true;
6119 }
6120 }
6121 }
6122
6123 return NULL_TREE;
6124 }
6125
6126 /* Handle a "noreturn" attribute; arguments as in
6127 struct attribute_spec.handler. */
6128
6129 static tree
6130 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6131 int ARG_UNUSED (flags), bool *no_add_attrs)
6132 {
6133 tree type = TREE_TYPE (*node);
6134
6135 /* See FIXME comment in c_common_attribute_table. */
6136 if (TREE_CODE (*node) == FUNCTION_DECL)
6137 TREE_THIS_VOLATILE (*node) = 1;
6138 else if (TREE_CODE (type) == POINTER_TYPE
6139 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6140 TREE_TYPE (*node)
6141 = build_pointer_type
6142 (build_type_variant (TREE_TYPE (type),
6143 TYPE_READONLY (TREE_TYPE (type)), 1));
6144 else
6145 {
6146 warning (OPT_Wattributes, "%qs attribute ignored",
6147 IDENTIFIER_POINTER (name));
6148 *no_add_attrs = true;
6149 }
6150
6151 return NULL_TREE;
6152 }
6153
6154 /* Handle a "noinline" attribute; arguments as in
6155 struct attribute_spec.handler. */
6156
6157 static tree
6158 handle_noinline_attribute (tree *node, tree name,
6159 tree ARG_UNUSED (args),
6160 int ARG_UNUSED (flags), bool *no_add_attrs)
6161 {
6162 if (TREE_CODE (*node) == FUNCTION_DECL)
6163 {
6164 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6165 {
6166 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6167 "with attribute %qs", name, "always_inline");
6168 *no_add_attrs = true;
6169 }
6170 else
6171 DECL_UNINLINABLE (*node) = 1;
6172 }
6173 else
6174 {
6175 warning (OPT_Wattributes, "%qE attribute ignored", name);
6176 *no_add_attrs = true;
6177 }
6178
6179 return NULL_TREE;
6180 }
6181
6182 /* Handle a "noclone" attribute; arguments as in
6183 struct attribute_spec.handler. */
6184
6185 static tree
6186 handle_noclone_attribute (tree *node, tree name,
6187 tree ARG_UNUSED (args),
6188 int ARG_UNUSED (flags), bool *no_add_attrs)
6189 {
6190 if (TREE_CODE (*node) != FUNCTION_DECL)
6191 {
6192 warning (OPT_Wattributes, "%qE attribute ignored", name);
6193 *no_add_attrs = true;
6194 }
6195
6196 return NULL_TREE;
6197 }
6198
6199 /* Handle a "leaf" attribute; arguments as in
6200 struct attribute_spec.handler. */
6201
6202 static tree
6203 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6204 int ARG_UNUSED (flags), bool *no_add_attrs)
6205 {
6206 if (TREE_CODE (*node) != FUNCTION_DECL)
6207 {
6208 warning (OPT_Wattributes, "%qE attribute ignored", name);
6209 *no_add_attrs = true;
6210 }
6211 if (!TREE_PUBLIC (*node))
6212 {
6213 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6214 *no_add_attrs = true;
6215 }
6216
6217 return NULL_TREE;
6218 }
6219
6220 /* Handle a "always_inline" attribute; arguments as in
6221 struct attribute_spec.handler. */
6222
6223 static tree
6224 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6225 int ARG_UNUSED (flags), bool *no_add_attrs)
6226 {
6227 if (TREE_CODE (*node) == FUNCTION_DECL)
6228 {
6229 /* Set the attribute and mark it for disregarding inline limits. */
6230 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6231 }
6232 else
6233 {
6234 warning (OPT_Wattributes, "%qE attribute ignored", name);
6235 *no_add_attrs = true;
6236 }
6237
6238 return NULL_TREE;
6239 }
6240
6241 /* Handle a "malloc" attribute; arguments as in
6242 struct attribute_spec.handler. */
6243
6244 static tree
6245 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6246 int ARG_UNUSED (flags), bool *no_add_attrs)
6247 {
6248 if (TREE_CODE (*node) == FUNCTION_DECL
6249 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6250 DECL_IS_MALLOC (*node) = 1;
6251 else
6252 {
6253 warning (OPT_Wattributes, "%qs attribute ignored",
6254 IDENTIFIER_POINTER (name));
6255 *no_add_attrs = true;
6256 }
6257
6258 return NULL_TREE;
6259 }
6260
6261 /* Fake handler for attributes we don't properly support. */
6262
6263 tree
6264 fake_attribute_handler (tree * ARG_UNUSED (node),
6265 tree ARG_UNUSED (name),
6266 tree ARG_UNUSED (args),
6267 int ARG_UNUSED (flags),
6268 bool * ARG_UNUSED (no_add_attrs))
6269 {
6270 return NULL_TREE;
6271 }
6272
6273 /* Handle a "type_generic" attribute. */
6274
6275 static tree
6276 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6277 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6278 bool * ARG_UNUSED (no_add_attrs))
6279 {
6280 /* Ensure we have a function type. */
6281 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6282
6283 /* Ensure we have a variadic function. */
6284 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6285
6286 return NULL_TREE;
6287 }
6288
6289 /* Handle a "vector_size" attribute; arguments as in
6290 struct attribute_spec.handler. */
6291
6292 static tree
6293 handle_vector_size_attribute (tree *node, tree name, tree args,
6294 int ARG_UNUSED (flags), bool *no_add_attrs)
6295 {
6296 tree type = *node;
6297 tree vector_type;
6298
6299 *no_add_attrs = true;
6300
6301   /* We need to provide for vector pointers, vector arrays, and
6302      functions returning vectors.  For example:
6303
6304        __attribute__((vector_size(16))) short *foo;
6305
6306      Here the attribute applies to the pointed-to type (HI), not to the
6307      pointer itself (SI on 32-bit targets), so we need to look further.  */
6308 while (POINTER_TYPE_P (type)
6309 || TREE_CODE (type) == FUNCTION_TYPE
6310 || TREE_CODE (type) == ARRAY_TYPE)
6311 type = TREE_TYPE (type);
6312
6313 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6314 if (!vector_type)
6315 return NULL_TREE;
6316
6317 /* Build back pointers if needed. */
6318 *node = reconstruct_complex_type (*node, vector_type);
6319
6320 return NULL_TREE;
6321 }
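
/* A worked instance of the example above: given

     __attribute__((vector_size(16))) short *foo;

   the loop strips the pointer type, build_vector_type_for_size turns the
   2-byte short into an 8-element vector (16 / 2 = 8, i.e. V8HI), and
   reconstruct_complex_type rebuilds a pointer-to-vector for *node.  */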
6322
6323 /* Handle a "vector_type" attribute; arguments as in
6324 struct attribute_spec.handler. */
6325
6326 static tree
6327 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6328 int ARG_UNUSED (flags), bool *no_add_attrs)
6329 {
6330 tree type = *node;
6331 tree vector_type;
6332
6333 *no_add_attrs = true;
6334
6335 if (TREE_CODE (type) != ARRAY_TYPE)
6336 {
6337       error ("attribute %qE applies to array types only",
6338              name);
6339 return NULL_TREE;
6340 }
6341
6342 vector_type = build_vector_type_for_array (type, name);
6343 if (!vector_type)
6344 return NULL_TREE;
6345
6346 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6347 *node = vector_type;
6348
6349 return NULL_TREE;
6350 }
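
/* A minimal illustration, modeled on the GNAT.SSE.Vector_Types style of
   declaration (a sketch; the exact runtime declarations differ):

     type m128 is array (1 .. 4) of Float;
     for m128'Alignment use 16;
     pragma Machine_Attribute (m128, "vector_type");

   The constrained array type is then recorded as the representative
   array of the vector type built above.  */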
6351
6352 /* ----------------------------------------------------------------------- *
6353 * BUILTIN FUNCTIONS *
6354 * ----------------------------------------------------------------------- */
6355
6356 /* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
6357    names.  The C front end also honors flag_no_builtin and, for NONANSI_P
6358    builtins, flag_no_nonansi_builtin; Ada has no such options (see below).  */
6359
6360 static void
6361 def_builtin_1 (enum built_in_function fncode,
6362 const char *name,
6363 enum built_in_class fnclass,
6364 tree fntype, tree libtype,
6365 bool both_p, bool fallback_p,
6366 bool nonansi_p ATTRIBUTE_UNUSED,
6367 tree fnattrs, bool implicit_p)
6368 {
6369 tree decl;
6370 const char *libname;
6371
6372   /* Preserve an already installed decl.  It most likely was set up in advance
6373 (e.g. as part of the internal builtins) for specific reasons. */
6374 if (builtin_decl_explicit (fncode))
6375 return;
6376
6377 gcc_assert ((!both_p && !fallback_p)
6378 || !strncmp (name, "__builtin_",
6379 strlen ("__builtin_")));
6380
6381 libname = name + strlen ("__builtin_");
6382 decl = add_builtin_function (name, fntype, fncode, fnclass,
6383 (fallback_p ? libname : NULL),
6384 fnattrs);
6385 if (both_p)
6386 /* ??? This is normally further controlled by command-line options
6387 like -fno-builtin, but we don't have them for Ada. */
6388 add_builtin_function (libname, libtype, fncode, fnclass,
6389 NULL, fnattrs);
6390
6391 set_builtin_decl (fncode, decl, implicit_p);
6392 }
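
/* For a typical library builtin such as memcpy, this worker declares
   "__builtin_memcpy" and, because builtins.def sets both_p for it, the
   plain "memcpy" fallback as well, then records the decl with
   set_builtin_decl so the middle end can expand calls to it.  */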
6393
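/* Referenced by the expansion of the builtins.def macros below.  Ada has
   no ISO C dialect modes, so these remain permanently 0.  */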
6394 static int flag_isoc94 = 0;
6395 static int flag_isoc99 = 0;
6396 static int flag_isoc11 = 0;
6397
6398 /* Install what the common builtins.def offers. */
6399
6400 static void
6401 install_builtin_functions (void)
6402 {
6403 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6404 NONANSI_P, ATTRS, IMPLICIT, COND) \
6405 if (NAME && COND) \
6406 def_builtin_1 (ENUM, NAME, CLASS, \
6407 builtin_types[(int) TYPE], \
6408 builtin_types[(int) LIBTYPE], \
6409 BOTH_P, FALLBACK_P, NONANSI_P, \
6410 built_in_attributes[(int) ATTRS], IMPLICIT);
6411 #include "builtins.def"
6412 }
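
/* For instance, the DEF_LIB_BUILTIN entry for memcpy in builtins.def
   roughly expands to:

     if ("__builtin_memcpy" && true)
       def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                      builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                      builtin_types[(int) BT_FN_PTR_PTR_CONST_PTR_SIZE],
                      true, true, false,
                      built_in_attributes[(int) ATTRS], true);

   where ATTRS stands for the entry's attribute-list index.  */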
6413
6418 /* Install the builtin functions we might need. */
6419
6420 void
6421 gnat_install_builtins (void)
6422 {
6423 install_builtin_elementary_types ();
6424 install_builtin_function_types ();
6425 install_builtin_attributes ();
6426
6427   /* Install builtins used by generic middle-end pieces first.  Some of these
6428      depend on internal details and set attributes accordingly; for instance,
6429      whether __builtin_alloca is no-throw depends on -fstack-check.  For such
6430      builtins we ignore the generic definition from builtins.def.  */
6431 build_common_builtin_nodes ();
6432
6433   /* Now install the target-specific builtins, such as the AltiVec family on
6434 ppc, and the common set as exposed by builtins.def. */
6435 targetm.init_builtins ();
6436 install_builtin_functions ();
6437 }
6438
6439 #include "gt-ada-utils.h"
6440 #include "gtype-ada.h"