gcc/ada/gcc-interface/utils.c
/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2018, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "tree.h"
#include "stringpool.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "toplev.h"
#include "output.h"
#include "debug.h"
#include "convert.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "tree-dump.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "nlists.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging in a lot of the common C front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "const",          0, 0,  true,  false, false, false,
    handle_const_attribute, NULL },
  { "nothrow",        0, 0,  true,  false, false, false,
    handle_nothrow_attribute, NULL },
  { "pure",           0, 0,  true,  false, false, false,
    handle_pure_attribute, NULL },
  { "no vops",        0, 0,  true,  false, false, false,
    handle_novops_attribute, NULL },
  { "nonnull",        0, -1, false, true,  true,  false,
    handle_nonnull_attribute, NULL },
  { "sentinel",       0, 1,  false, true,  true,  false,
    handle_sentinel_attribute, NULL },
  { "noreturn",       0, 0,  true,  false, false, false,
    handle_noreturn_attribute, NULL },
  { "noinline",       0, 0,  true,  false, false, false,
    handle_noinline_attribute, NULL },
  { "noclone",        0, 0,  true,  false, false, false,
    handle_noclone_attribute, NULL },
  { "leaf",           0, 0,  true,  false, false, false,
    handle_leaf_attribute, NULL },
  { "always_inline",  0, 0,  true,  false, false, false,
    handle_always_inline_attribute, NULL },
  { "malloc",         0, 0,  true,  false, false, false,
    handle_malloc_attribute, NULL },
  { "type generic",   0, 0,  false, true,  true,  false,
    handle_type_generic_attribute, NULL },

  { "vector_size",    1, 1,  false, true,  false, false,
    handle_vector_size_attribute, NULL },
  { "vector_type",    0, 0,  false, true,  false, false,
    handle_vector_type_attribute, NULL },
  { "may_alias",      0, 0,  false, true,  false, false, NULL, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",         3, 3,  false, true,  true,  false, fake_attribute_handler,
    NULL },
  { "format_arg",     1, 1,  false, true,  true,  false, fake_attribute_handler,
    NULL },

  { NULL,             0, 0,  false, false, false, false, NULL, NULL }
};

/* Associates a GNAT tree node with a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity with a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table of types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash
{
  hashval_t hash;
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);

  static int
  keep_cache_entry (pad_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree fold_bit_position (const_tree);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on.  */
  tree decl;

  /* The corresponding entity's Scope.  */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed.  */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to.  */
  vec<tree> types;

  /* The next queue item.  */
  struct deferred_decl_context_node *next;
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
                                       tree type);
\f
/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  pad_type_hash_table->empty ();
  pad_type_hash_table = NULL;
}
\f
/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
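
/* Editor's note: an illustrative sketch, not part of the original source.
   A typical use of the association table when elaborating a GNAT entity
   GNAT_E into a GCC ..._DECL node GNU_D looks like:

     if (present_gnu_tree (gnat_e))
       return get_gnu_tree (gnat_e);
     gnu_d = ...;
     save_gnu_tree (gnat_e, gnu_d, false);

   Passing false for NO_CHECK keeps the assertion that GNU_D really is a
   ..._DECL node.  */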
\f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type, debug_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use it.  Otherwise, make a new one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
                        ? tree_code_for_record_type (gnat_equiv)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  /* Create a debug type so that debug info consumers only see an unspecified
     type.  */
  if (Needs_Debug_Info (gnat_type))
    {
      debug_type = make_node (LANG_TYPE);
      SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);

      TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
      TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
    }

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
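
/* Editor's note: an illustrative sketch, not part of the original source.
   In C terms, the dummy fat pointer built above is roughly

     struct XUP { void *p_array; struct XUB *p_bounds; };

   where the XUB record holds the bounds (the "template") of the
   unconstrained array, and the XUT record is the type used to lay out the
   allocated object itself.  */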
\f
/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || !current_function_decl;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent's subblocks.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
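
/* Editor's note: an illustrative sketch, not part of the original source.
   Binding levels are used in matched pairs around each scope that gigi
   opens, for instance:

     gnat_pushlevel ();
     ...  declare local entities with gnat_pushdecl  ...
     gnat_poplevel ();   (or gnat_zaplevel to discard the BLOCK)
*/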

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
\f
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
         Some parallel types seem to be present in multiple parallel type
         chains, so don't mess with their context if they already have one.  */
      if (!TYPE_CONTEXT (parallel_type))
        {
          if (TYPE_STUB_DECL (parallel_type))
            DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
          TYPE_CONTEXT (parallel_type) = context;
        }

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for debug info purposes, or Empty if there is no such scope.  If not
   NULL, set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
        {
        case E_Function:
        case E_Procedure:
          if (Present (Protected_Body_Subprogram (gnat_entity)))
            gnat_entity = Protected_Body_Subprogram (gnat_entity);

          /* If the scope is a subprogram, then just rely on
             current_function_decl, so that we don't have to defer
             anything.  This is needed because other places rely on the
             validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
          if (is_subprogram)
            *is_subprogram = true;
          return gnat_entity;

        case E_Record_Type:
        case E_Record_Subtype:
          return gnat_entity;

        default:
          /* By default, we are not interested in this particular scope: go to
             the outer one.  */
          break;
        }

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type, tree context,
                           struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context, but create it first if need be.  */

static tree
get_global_context (void)
{
  if (!global_context)
    {
      global_context
        = build_translation_unit_decl (get_identifier (main_input_filename));
      debug_hooks->register_main_translation_unit (global_context);
    }

  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to the current_function_decl
         if the GNAT scope reached the global scope, if it reached a
         subprogram, or if the declaration is a subprogram or a variable
         (for them we skip intermediate context types because the subprogram
         body elaboration machinery and the inliner both expect a subprogram
         context).

         Falling back to current_function_decl is necessary for implicit
         subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
          && !context_is_subprogram
          && TREE_CODE (decl) != FUNCTION_DECL
          && TREE_CODE (decl) != VAR_DECL)
        /* Always assume the scope has not been elaborated, thus defer the
           context propagation to the time its elaboration will be
           available.  */
        deferred_decl_context
          = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
         local context.  */
      else if (current_function_decl && force_global == 0)
        context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help in determining the context, use the
     global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions, mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
          || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      /* External declarations must go to the binding level they belong to.
         This ensures that the corresponding imported entities are available
         in the debugger at the proper time.  */
      if (DECL_EXTERNAL (decl)
          && TREE_CODE (decl) == FUNCTION_DECL
          && DECL_BUILT_IN (decl))
        vec_safe_push (builtin_decls, decl);
      else if (global_bindings_p ())
        vec_safe_push (global_decls, decl);
      else
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
         to generate a typedef in DWARF for them and make sure it is preserved,
         unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
          && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
              || DECL_ARTIFICIAL (decl)))
        ;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
         generate the typedef in DWARF.  Also do that for fat pointer types
         because, even though they are tagged types in the C sense, they are
         still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
               && (TREE_CODE (t) == ARRAY_TYPE
                   || TREE_CODE (t) == POINTER_TYPE
                   || TYPE_IS_FAT_POINTER_P (t)))
        {
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          defer_or_set_type_context (tt,
                                     DECL_CONTEXT (decl),
                                     deferred_decl_context);
          TREE_TYPE (decl) = tt;
          if (TYPE_NAME (t)
              && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
              && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          /* Array types need to have a name so that they can be related to
             their GNAT encodings.  */
          if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
            TYPE_NAME (t) = DECL_NAME (decl);
          t = NULL_TREE;
        }
      else if (TYPE_NAME (t)
               && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
               && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
         qualifiers machinery to work properly (see check_qualified_type).
         Also propagate the context to them.  Note that it will be propagated
         to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          /* ??? Because of the previous kludge, we can have variants of fat
             pointer types with different names.  */
          if (!(TYPE_IS_FAT_POINTER_P (t)
                && TYPE_NAME (t)
                && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            {
              TYPE_NAME (t) = decl;
              defer_or_set_type_context (t,
                                         DECL_CONTEXT (decl),
                                         deferred_decl_context);
            }
    }
}
\f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
                    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                  <--------- pos ---------->
              { +------------+-------------+-----------------+
      record =>{ |############|     ...    | field (type)    |
              { +------------+-------------+-----------------+
                |<-- room -->|<- voffset ->|<---- size ----->|
                o            o
                |            |
                record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
                           fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
                           size_int ((align / BITS_PER_UNIT) - 1));
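
  /* For instance (editor's note, an illustrative worked example), with
     vblock_addr == 0x1005 and ALIGN == 64 bits: the mask is 8 - 1 == 7 and
     -0x1005 & 7 == 3, so the field is placed 3 bytes further, at 0x1008,
     the next multiple of 8.  */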

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
                    convert (bitsizetype,
                             size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
                             pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
                  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
                  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int align = TYPE_ALIGN (type);
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
          || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
        return type;

      /* Round the RM size up to a unit boundary to get the minimal size
         for a BLKmode record.  Give up if it's already the size and we
         don't need to lower the alignment.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size && (max_align == 0 || align <= max_align))
        return type;

      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
      if (max_align > 0 && new_align > max_align)
        new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
        new_field_type = make_packable_type (new_field_type, true, max_align);

      /* However, for the last field in a not already packed record type
         that is of an aggregate type, we need to use the RM size in the
         packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
          && !TYPE_PACKED (type)
          && RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
          && TYPE_ADA_SIZE (new_field_type))
        new_size = TYPE_ADA_SIZE (new_field_type);
      else
        new_size = DECL_SIZE (field);

      new_field
        = create_field_decl (DECL_NAME (field), new_field_type, new_type,
                             new_size, bit_position (field),
                             TYPE_PACKED (type),
                             !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
        DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
                            DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
                   mode_for_size_tree (TYPE_SIZE (new_type),
                                       MODE_INT, 1).else_blk ());

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  return new_type;
}
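
/* Editor's note: an illustrative example, not part of the original source.
   For a 3-byte BLKmode record used as a field of a packed record, SIZE is
   24 so new_size = ceil_pow2 (24) == 32 and the copy gets a 32-bit
   alignment, which typically lets compute_record_mode assign it an integral
   mode (SImode); the field can then be packed tightly.  */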

/* Return true if TYPE has an unsigned representation.  This needs to be used
   when the representation of types whose precision is not equal to their size
   is manipulated based on the RM size.  */

static inline bool
type_unsigned_for_rm (tree type)
{
  /* This is the common case.  */
  if (TYPE_UNSIGNED (type))
    return true;

  /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
  if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
      && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
    return true;

  return false;
}
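
/* Editor's note: an illustrative example, not part of the original source.
   Given the Ada declaration

     subtype S is Integer range 0 .. 100;

   the GCC type for S is a subtype of the signed type Integer, so
   TYPE_UNSIGNED is clear; yet its RM size is 7 bits and values stored on
   7 bits must be zero-extended, whence the constant lower bound test
   above.  */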

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case BOOLEAN_TYPE:
      /* Do not mess with boolean types that have foreign convention.  */
      if (TYPE_PRECISION (type) == 1 && TYPE_SIZE (type) == size_tree)
        break;

      /* ... fall through ... */

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
                  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
        size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
         already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
          || (TYPE_PRECISION (type) == size && biased_p == for_biased)
          || size > LONG_LONG_TYPE_SIZE)
        break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
         or if the lower bound is constant and non-negative or if the type is
         biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
      if (type_unsigned_for_rm (type) || biased_p)
        new_type = make_unsigned_type (size);
      else
        new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
         not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Only do something if this is a fat pointer, in which case we
         may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
        {
          scalar_int_mode p_mode;
          if (!int_mode_for_size (size, 0).exists (&p_mode)
              || !targetm.valid_pointer_mode (p_mode))
            p_mode = ptr_mode;
          return
            build_pointer_type_for_mode
              (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
               p_mode, 0);
        }
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
         may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
        return
          build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
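
/* Editor's note: an illustrative example, not part of the original source.
   For a representation clause forcing an 8-bit encoding of a signed integer
   subtype, the INTEGER_TYPE case above returns make_signed_type (8) with
   the bounds and name copied from the original type and an RM size of 8
   bits; with FOR_BIASED true, an unsigned type with
   TYPE_BIASED_REPRESENTATION_P set is built instead.  */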

/* Return true iff the padded types are equivalent.  */

bool
pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
{
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same type
     and have the same size, alignment, RM size and storage order.  Taking the
     mode into account is redundant since it is determined by the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
    && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}

/* Compute the hash value for the padded TYPE.  */

static hashval_t
hash_pad_type (tree type)
{
  hashval_t hashcode;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  return hashcode;
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
canonicalize_pad_type (tree type)
{
  const hashval_t hashcode = hash_pad_type (type);
  struct pad_type_hash in, *h, **slot;

  in.hash = hashcode;
  in.type = type;
  slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
  h = *slot;
  if (!h)
    {
      h = ggc_alloc<pad_type_hash> ();
      h->hash = hashcode;
      h->type = type;
      *slot = h;
    }

  return h->type;
}

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too; in this case,
   the padded type is canonicalized before being returned.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
                Entity_Id gnat_entity, bool is_component_type,
                bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
           || operand_equal_p (round_up (size, orig_align), orig_size, 0))
          && (align == 0 || align == orig_align))
        return type;

      if (!size)
        size = orig_size;
      if (align == 0)
        align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
          || (TREE_CODE (orig_size) == INTEGER_CST
              && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
                      !Comes_From_Source (gnat_entity),
                      !(TYPE_NAME (type)
                        && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
                        && DECL_IGNORED_P (TYPE_NAME (type))),
                      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info).  Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information.  */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
               size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
                           bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
          || (TREE_CODE (size) == INTEGER_CST
              && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
          && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
                             bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
         it by means of the hash table.  This is consistent with the language
         semantics and ensures that gigi and the middle-end have a common view
         of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
        {
          tree canonical = canonicalize_pad_type (record);
          if (canonical != record)
            {
              record = canonical;
              goto built;
            }
        }
    }

  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, type);

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
           && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
         it will be referenced by debugging information attributes.  At global
         level, we need a single variable across all translation units.  */
      if (size
          && TREE_CODE (size) != INTEGER_CST
          && (definition || global_bindings_p ()))
        {
          /* Whether or not gnat_entity comes from source, this XVZ variable
             is a compilation artifact.  */
          size_unit
            = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
                               size_unit, true, global_bindings_p (),
                               !definition && global_bindings_p (), false,
                               false, true, true, NULL, gnat_entity);
          TYPE_SIZE_UNIT (record) = size_unit;
        }

      /* There is no need to show what we are a subtype of when outputting as
         few encodings as possible: regular debugging information makes this
         redundant.  */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
        {
          tree marker = make_node (RECORD_TYPE);
          tree orig_name = TYPE_IDENTIFIER (type);

          TYPE_NAME (marker) = concat_name (name, "XVS");
          finish_record_type (marker,
                              create_field_decl (orig_name,
                                                 build_reference_type (type),
                                                 marker, NULL_TREE, NULL_TREE,
                                                 0, 0),
                              0, true);
          TYPE_SIZE_UNIT (marker) = size_unit;

          add_parallel_type (record, marker);
        }
    }

built:
  /* If a simple size was explicitly given, maybe issue a warning.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity))
    return record;

  /* But don't do it if we are just annotating types and the type is tagged or
     concurrent, since these types aren't fully laid out in this mode.  */
  if (type_annotate_only)
    {
      Entity_Id gnat_type
        = is_component_type
          ? Component_Type (gnat_entity) : Etype (gnat_entity);

      if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
        return record;
    }

  /* Take the original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified, but only for aggregate types.  */
  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
           && TREE_CODE (orig_size) == INTEGER_CST
           && (TREE_OVERFLOW (size)
               || TREE_OVERFLOW (orig_size)
               || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
        gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
           || Ekind (gnat_entity) == E_Discriminant)
          && Present (Component_Clause (gnat_entity)))
        gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Has_Size_Clause (gnat_entity))
        gnat_error_node = Expression (Size_Clause (gnat_entity));
      else if (Has_Object_Size_Clause (gnat_entity))
        gnat_error_node = Expression (Object_Size_Clause (gnat_entity));
      else
        gnat_error_node = Empty;

      /* Generate message only for entities that come from source, since
         if we have an entity created by expansion, the message will be
         generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
        {
          if (Present (gnat_error_node))
            post_error_ne_tree ("{^ }bits of & unused?",
                                gnat_error_node, gnat_entity,
                                size_diffop (size, orig_size));
          else if (is_component_type)
            post_error_ne_tree ("component of& padded{ by ^ bits}?",
                                gnat_entity, gnat_entity,
                                size_diffop (size, orig_size));
        }
    }

  return record;
}
1545
1546 /* Return true if padded TYPE was built with an RM size. */
1547
1548 bool
1549 pad_type_has_rm_size (tree type)
1550 {
1551 /* This is required for the lookup. */
1552 if (!TREE_CONSTANT (TYPE_SIZE (type)))
1553 return false;
1554
1555 const hashval_t hashcode = hash_pad_type (type);
1556 struct pad_type_hash in, *h;
1557
1558 in.hash = hashcode;
1559 in.type = type;
1560 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1561
1562 /* The types built with an RM size are the canonicalized ones. */
1563 return h && h->type == type;
1564 }
1565
1566 /* Return a copy of the padded TYPE but with reverse storage order. */
1567
1568 tree
1569 set_reverse_storage_order_on_pad_type (tree type)
1570 {
1571 if (flag_checking)
1572 {
1573 /* If the inner type is not scalar then the function does nothing. */
1574 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1575 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1576 && !VECTOR_TYPE_P (inner_type));
1577 }
1578
1579 /* This is required for the canonicalization. */
1580 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1581
1582 tree field = copy_node (TYPE_FIELDS (type));
1583 type = copy_type (type);
1584 DECL_CONTEXT (field) = type;
1585 TYPE_FIELDS (type) = field;
1586 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1587 return canonicalize_pad_type (type);
1588 }
1589 \f
1590 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1591 If this is a multi-dimensional array type, do this recursively.
1592
1593 OP may be
1594 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1595 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1596 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1597
1598 void
1599 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1600 {
1601 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1602 of a one-dimensional array, since the padding has the same alias set
1603 as the field type, but if it's a multi-dimensional array, we need to
1604 see the inner types. */
1605 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1606 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1607 || TYPE_PADDING_P (gnu_old_type)))
1608 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1609
1610 /* Unconstrained array types are deemed incomplete and would thus be given
1611 alias set 0. Retrieve the underlying array type. */
1612 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1613 gnu_old_type
1614 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1615 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1616 gnu_new_type
1617 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1618
1619 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1620 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1621 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1622 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1623
1624 switch (op)
1625 {
1626 case ALIAS_SET_COPY:
1627 /* The alias set shouldn't be copied between array types with different
1628 aliasing settings because this can break the aliasing relationship
1629 between the array type and its element type. */
1630 if (flag_checking || flag_strict_aliasing)
1631 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1632 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1633 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1634 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1635
1636 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1637 break;
1638
1639 case ALIAS_SET_SUBSET:
1640 case ALIAS_SET_SUPERSET:
1641 {
1642 alias_set_type old_set = get_alias_set (gnu_old_type);
1643 alias_set_type new_set = get_alias_set (gnu_new_type);
1644
1645 /* Do nothing if the alias sets conflict. This ensures that we
1646 never call record_alias_subset several times for the same pair
1647 or at all for alias set 0. */
1648 if (!alias_sets_conflict_p (old_set, new_set))
1649 {
1650 if (op == ALIAS_SET_SUBSET)
1651 record_alias_subset (old_set, new_set);
1652 else
1653 record_alias_subset (new_set, old_set);
1654 }
1655 }
1656 break;
1657
1658 default:
1659 gcc_unreachable ();
1660 }
1661
1662 record_component_aliases (gnu_new_type);
1663 }
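
/* Illustrative sketch, not part of the build: a caller deriving a
   constrained array type from its base type would typically copy the
   alias set, whereas a view accessing a strict subset of the data would
   use ALIAS_SET_SUBSET; the variable names here are hypothetical.  */
#if 0
relate_alias_sets (gnu_derived_type, gnu_base_type, ALIAS_SET_COPY);
relate_alias_sets (gnu_view_type, gnu_base_type, ALIAS_SET_SUBSET);
#endif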
1664 \f
1665 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1666 ARTIFICIAL_P is true if the type was generated by the compiler. */
1667
1668 void
1669 record_builtin_type (const char *name, tree type, bool artificial_p)
1670 {
1671 tree type_decl = build_decl (input_location,
1672 TYPE_DECL, get_identifier (name), type);
1673 DECL_ARTIFICIAL (type_decl) = artificial_p;
1674 TYPE_ARTIFICIAL (type) = artificial_p;
1675 gnat_pushdecl (type_decl, Empty);
1676
1677 if (debug_hooks->type_decl)
1678 debug_hooks->type_decl (type_decl, false);
1679 }
1680 \f
1681 /* Finish constructing the character type CHAR_TYPE.
1682
1683 In Ada character types are enumeration types and, as a consequence, are
1684 represented in the front-end by integral types holding the positions of
1685 the enumeration values as defined by the language, which means that the
1686 integral types are unsigned.
1687
1688 Unfortunately the signedness of 'char' in C is implementation-defined
1689 and GCC even has the option -fsigned-char to toggle it at compile time.
1690 Since GNAT's philosophy is to be compatible with C by default, to wit
1691 Interfaces.C.char is defined as a mere copy of Character, we may need
1692 to declare character types as signed types in GENERIC and generate the
1693 necessary adjustments to make them behave as unsigned types.
1694
1695 The overall strategy is as follows: if 'char' is unsigned, do nothing;
1696 if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
1697 character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
1698 types. The idea is to ensure that the bit pattern contained in the
1699 Esize'd objects is not changed, even though the numerical value will
1700 be interpreted differently depending on the signedness. */
1701
1702 void
1703 finish_character_type (tree char_type)
1704 {
1705 if (TYPE_UNSIGNED (char_type))
1706 return;
1707
1708 /* Make a copy of a generic unsigned version since we'll modify it. */
1709 tree unsigned_char_type
1710 = (char_type == char_type_node
1711 ? unsigned_char_type_node
1712 : copy_type (gnat_unsigned_type_for (char_type)));
1713
1714 /* Create an unsigned version of the type and set it as debug type. */
1715 TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
1716 TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
1717 TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
1718 SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);
1719
1720 /* If this is a subtype, make the debug type a subtype of the debug type
1721 of the base type and convert literal RM bounds to unsigned. */
1722 if (TREE_TYPE (char_type))
1723 {
1724 tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
1725 tree min_value = TYPE_RM_MIN_VALUE (char_type);
1726 tree max_value = TYPE_RM_MAX_VALUE (char_type);
1727
1728 if (TREE_CODE (min_value) == INTEGER_CST)
1729 min_value = fold_convert (base_unsigned_char_type, min_value);
1730 if (TREE_CODE (max_value) == INTEGER_CST)
1731 max_value = fold_convert (base_unsigned_char_type, max_value);
1732
1733 TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
1734 SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
1735 SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
1736 }
1737
1738 /* Adjust the RM bounds of the original type to unsigned; that's especially
1739 important for types since they are implicit in this case. */
1740 SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
1741 SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
1742 }
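
/* Worked example of the strategy above, assuming 8-bit characters and a
   signed 'char': Character'Val (200) has bit pattern 16#C8#, which a
   signed GENERIC type reads as -56; keeping the unsigned version as the
   debug type and converting the RM bounds back to unsigned makes the
   value print and range-check as 200 again, while the bit pattern in
   memory is never changed.  */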
1743
1744 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1745 finish constructing the record type as a fat pointer type. */
1746
1747 void
1748 finish_fat_pointer_type (tree record_type, tree field_list)
1749 {
1750 /* Make sure we can put it into a register. */
1751 if (STRICT_ALIGNMENT)
1752 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1753
1754 /* Show what it really is. */
1755 TYPE_FAT_POINTER_P (record_type) = 1;
1756
1757 /* Do not emit debug info for it since the types of its fields may still be
1758 incomplete at this point. */
1759 finish_record_type (record_type, field_list, 0, false);
1760
1761 /* Force type_contains_placeholder_p to return true on it. Although the
1762 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1763 type but the representation of the unconstrained array. */
1764 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1765 }
1766
1767 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1768 finish constructing the record or union type. If REP_LEVEL is zero, this
1769 record has no representation clause and so will be entirely laid out here.
1770 If REP_LEVEL is one, this record has a representation clause and has been
1771 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1772 this record is derived from a parent record and thus inherits its layout;
1773 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1774 additional debug info needs to be output for this type. */
1775
1776 void
1777 finish_record_type (tree record_type, tree field_list, int rep_level,
1778 bool debug_info_p)
1779 {
1780 enum tree_code code = TREE_CODE (record_type);
1781 tree name = TYPE_IDENTIFIER (record_type);
1782 tree ada_size = bitsize_zero_node;
1783 tree size = bitsize_zero_node;
1784 bool had_size = TYPE_SIZE (record_type) != 0;
1785 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1786 bool had_align = TYPE_ALIGN (record_type) != 0;
1787 tree field;
1788
1789 TYPE_FIELDS (record_type) = field_list;
1790
1791 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1792 generate debug info and have a parallel type. */
1793 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1794
1795 /* Globally initialize the record first. If this is a rep'ed record,
1796 that just means some initializations; otherwise, layout the record. */
1797 if (rep_level > 0)
1798 {
1799 SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
1800 TYPE_ALIGN (record_type)));
1801
1802 if (!had_size_unit)
1803 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1804
1805 if (!had_size)
1806 TYPE_SIZE (record_type) = bitsize_zero_node;
1807
1808 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1809 out just like a UNION_TYPE, since the size will be fixed. */
1810 else if (code == QUAL_UNION_TYPE)
1811 code = UNION_TYPE;
1812 }
1813 else
1814 {
1815 /* Ensure there isn't a size already set. There can be in an error
1816 case where there is a rep clause but all fields have errors and
1817 no longer have a position. */
1818 TYPE_SIZE (record_type) = 0;
1819
1820 /* Ensure we use the traditional GCC layout for bitfields when we need
1821 to pack the record type or have a representation clause. The other
1822 possible layout (Microsoft C compiler), if available, would prevent
1823 efficient packing in almost all cases. */
1824 #ifdef TARGET_MS_BITFIELD_LAYOUT
1825 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1826 decl_attributes (&record_type,
1827 tree_cons (get_identifier ("gcc_struct"),
1828 NULL_TREE, NULL_TREE),
1829 ATTR_FLAG_TYPE_IN_PLACE);
1830 #endif
1831
1832 layout_type (record_type);
1833 }
1834
1835 /* At this point, the position and size of each field is known. It was
1836 either set before entry by a rep clause, or by laying out the type above.
1837
1838 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1839 to compute the Ada size; the GCC size and alignment (for rep'ed records
1840 that are not padding types); and the mode (for rep'ed records). We also
1841 clear the DECL_BIT_FIELD indication for the cases we know have not been
1842 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1843
1844 if (code == QUAL_UNION_TYPE)
1845 field_list = nreverse (field_list);
1846
1847 for (field = field_list; field; field = DECL_CHAIN (field))
1848 {
1849 tree type = TREE_TYPE (field);
1850 tree pos = bit_position (field);
1851 tree this_size = DECL_SIZE (field);
1852 tree this_ada_size;
1853
1854 if (RECORD_OR_UNION_TYPE_P (type)
1855 && !TYPE_FAT_POINTER_P (type)
1856 && !TYPE_CONTAINS_TEMPLATE_P (type)
1857 && TYPE_ADA_SIZE (type))
1858 this_ada_size = TYPE_ADA_SIZE (type);
1859 else
1860 this_ada_size = this_size;
1861
1862 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1863 if (DECL_BIT_FIELD (field)
1864 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1865 {
1866 unsigned int align = TYPE_ALIGN (type);
1867
1868 /* In the general case, type alignment is required. */
1869 if (value_factor_p (pos, align))
1870 {
1871 /* The enclosing record type must be sufficiently aligned.
1872 Otherwise, if no alignment was specified for it and it
1873 has been laid out already, bump its alignment to the
1874 desired one if this is compatible with its size and
1875 maximum alignment, if any. */
1876 if (TYPE_ALIGN (record_type) >= align)
1877 {
1878 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1879 DECL_BIT_FIELD (field) = 0;
1880 }
1881 else if (!had_align
1882 && rep_level == 0
1883 && value_factor_p (TYPE_SIZE (record_type), align)
1884 && (!TYPE_MAX_ALIGN (record_type)
1885 || TYPE_MAX_ALIGN (record_type) >= align))
1886 {
1887 SET_TYPE_ALIGN (record_type, align);
1888 SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
1889 DECL_BIT_FIELD (field) = 0;
1890 }
1891 }
1892
1893 /* In the non-strict alignment case, only byte alignment is required. */
1894 if (!STRICT_ALIGNMENT
1895 && DECL_BIT_FIELD (field)
1896 && value_factor_p (pos, BITS_PER_UNIT))
1897 DECL_BIT_FIELD (field) = 0;
1898 }
1899
1900 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1901 field is technically not addressable. Except that it can actually
1902 be addressed if it is BLKmode and happens to be properly aligned. */
1903 if (DECL_BIT_FIELD (field)
1904 && !(DECL_MODE (field) == BLKmode
1905 && value_factor_p (pos, BITS_PER_UNIT)))
1906 DECL_NONADDRESSABLE_P (field) = 1;
1907
1908 /* A type must be as aligned as its most aligned field that is not a
1909 bit-field; layout_type already enforces this for non-rep'ed records. */
1910 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1911 SET_TYPE_ALIGN (record_type,
1912 MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));
1913
1914 switch (code)
1915 {
1916 case UNION_TYPE:
1917 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1918 size = size_binop (MAX_EXPR, size, this_size);
1919 break;
1920
1921 case QUAL_UNION_TYPE:
1922 ada_size
1923 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1924 this_ada_size, ada_size);
1925 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1926 this_size, size);
1927 break;
1928
1929 case RECORD_TYPE:
1930 /* Since we know here that all fields are sorted in order of
1931 increasing bit position, the size of the record is one
1932 higher than the ending bit of the last field processed
1933 unless we have a rep clause, since in that case we might
1934 have a field outside a QUAL_UNION_TYPE that has a higher ending
1935 position. So use a MAX in that case. Also, if this field has a
1936 QUAL_UNION_TYPE, we need to take into account the previous size in
1937 the case of empty variants. */
1938 ada_size
1939 = merge_sizes (ada_size, pos, this_ada_size,
1940 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1941 size
1942 = merge_sizes (size, pos, this_size,
1943 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1944 break;
1945
1946 default:
1947 gcc_unreachable ();
1948 }
1949 }
1950
1951 if (code == QUAL_UNION_TYPE)
1952 nreverse (field_list);
1953
1954 if (rep_level < 2)
1955 {
1956 /* If this is a padding record, we never want to make the size smaller
1957 than what was specified in it, if any. */
1958 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1959 size = TYPE_SIZE (record_type);
1960
1961 /* Now set any of the values we've just computed that apply. */
1962 if (!TYPE_FAT_POINTER_P (record_type)
1963 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1964 SET_TYPE_ADA_SIZE (record_type, ada_size);
1965
1966 if (rep_level > 0)
1967 {
1968 tree size_unit = had_size_unit
1969 ? TYPE_SIZE_UNIT (record_type)
1970 : convert (sizetype,
1971 size_binop (CEIL_DIV_EXPR, size,
1972 bitsize_unit_node));
1973 unsigned int align = TYPE_ALIGN (record_type);
1974
1975 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1976 TYPE_SIZE_UNIT (record_type)
1977 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1978
1979 compute_record_mode (record_type);
1980 }
1981 }
1982
1983 /* Reset the TYPE_MAX_ALIGN field since it's private to gigi. */
1984 TYPE_MAX_ALIGN (record_type) = 0;
1985
1986 if (debug_info_p)
1987 rest_of_record_type_compilation (record_type);
1988 }
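
/* Illustrative sketch of a typical non-rep'ed use, with made-up names,
   not part of the build: lay out a two-field record from scratch and
   emit debug info for it.  */
#if 0
{
  tree rec = make_node (RECORD_TYPE);
  tree f1 = create_field_decl (get_identifier ("F1"), integer_type_node,
			       rec, NULL_TREE, NULL_TREE, 0, 0);
  tree f2 = create_field_decl (get_identifier ("F2"), integer_type_node,
			       rec, NULL_TREE, NULL_TREE, 0, 0);

  TYPE_NAME (rec) = get_identifier ("example__rec");
  DECL_CHAIN (f1) = f2;

  /* REP_LEVEL is 0: no rep clause, so the record is laid out here.  */
  finish_record_type (rec, f1, 0, true);
}
#endif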
1989
1990 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1991 PARALLEL_TYPE has no context and its computation is not deferred yet, also
1992 propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
1993 moment TYPE will get a context. */
1994
1995 void
1996 add_parallel_type (tree type, tree parallel_type)
1997 {
1998 tree decl = TYPE_STUB_DECL (type);
1999
2000 while (DECL_PARALLEL_TYPE (decl))
2001 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
2002
2003 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
2004
2005 /* If PARALLEL_TYPE already has a context, we are done. */
2006 if (TYPE_CONTEXT (parallel_type))
2007 return;
2008
2009 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
2010 it to PARALLEL_TYPE. */
2011 if (TYPE_CONTEXT (type))
2012 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2013
2014 /* Otherwise TYPE has no context yet. We know it will have one thanks to
2015 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2016 so we have nothing to do in this case. */
2017 }
2018
2019 /* Return true if TYPE has a parallel type. */
2020
2021 static bool
2022 has_parallel_type (tree type)
2023 {
2024 tree decl = TYPE_STUB_DECL (type);
2025
2026 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
2027 }
2028
2029 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
2030 associated with it. It need not be invoked directly in most cases as
2031 finish_record_type takes care of doing so. */
2032
2033 void
2034 rest_of_record_type_compilation (tree record_type)
2035 {
2036 bool var_size = false;
2037 tree field;
2038
2039 /* If this is a padded type, the bulk of the debug info has already been
2040 generated for the field's type. */
2041 if (TYPE_IS_PADDING_P (record_type))
2042 return;
2043
2044 /* If the type already has a parallel type (XVS type), then we're done. */
2045 if (has_parallel_type (record_type))
2046 return;
2047
2048 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
2049 {
2050 /* We need to make an XVE/XVU record if any field has variable size,
2051 whether or not the record does. For example, if we have a union,
2052 it may be that all fields, rounded up to the alignment, have the
2053 same size, in which case we'll use that size. But the debug
2054 output routines (except Dwarf2) won't be able to output the fields,
2055 so we need to make the special record. */
2056 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
2057 /* If a field has a non-constant qualifier, the record will have
2058 variable size too. */
2059 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
2060 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
2061 {
2062 var_size = true;
2063 break;
2064 }
2065 }
2066
2067 /* If this record type is of variable size, make a parallel record type that
2068 will tell the debugger how the former is laid out (see exp_dbug.ads). */
2069 if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
2070 {
2071 tree new_record_type
2072 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
2073 ? UNION_TYPE : TREE_CODE (record_type));
2074 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
2075 tree last_pos = bitsize_zero_node;
2076 tree old_field, prev_old_field = NULL_TREE;
2077
2078 new_name
2079 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
2080 ? "XVU" : "XVE");
2081 TYPE_NAME (new_record_type) = new_name;
2082 SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
2083 TYPE_STUB_DECL (new_record_type)
2084 = create_type_stub_decl (new_name, new_record_type);
2085 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
2086 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
2087 gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
2088 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
2089 TYPE_SIZE_UNIT (new_record_type)
2090 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
2091
2092 /* Now scan all the fields, replacing each field with a new field
2093 corresponding to the new encoding. */
2094 for (old_field = TYPE_FIELDS (record_type); old_field;
2095 old_field = DECL_CHAIN (old_field))
2096 {
2097 tree field_type = TREE_TYPE (old_field);
2098 tree field_name = DECL_NAME (old_field);
2099 tree curpos = fold_bit_position (old_field);
2100 tree pos, new_field;
2101 bool var = false;
2102 unsigned int align = 0;
2103
2104 /* See how the position was modified from the last position.
2105
2106 There are two basic cases we support: a value was added
2107 to the last position or the last position was rounded to
2108 a boundary and then something was added. Check for the
2109 first case first. If not, see if there is any evidence
2110 of rounding. If so, round the last position and retry.
2111
2112 If this is a union, the position can be taken as zero. */
2113 if (TREE_CODE (new_record_type) == UNION_TYPE)
2114 pos = bitsize_zero_node;
2115 else
2116 pos = compute_related_constant (curpos, last_pos);
2117
2118 if (!pos
2119 && TREE_CODE (curpos) == MULT_EXPR
2120 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
2121 {
2122 tree offset = TREE_OPERAND (curpos, 0);
2123 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2124 align = scale_by_factor_of (offset, align);
2125 last_pos = round_up (last_pos, align);
2126 pos = compute_related_constant (curpos, last_pos);
2127 }
2128 else if (!pos
2129 && TREE_CODE (curpos) == PLUS_EXPR
2130 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
2131 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
2132 && tree_fits_uhwi_p
2133 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
2134 {
2135 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
2136 unsigned HOST_WIDE_INT addend
2137 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
2138 align
2139 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
2140 align = scale_by_factor_of (offset, align);
2141 align = MIN (align, addend & -addend);
2142 last_pos = round_up (last_pos, align);
2143 pos = compute_related_constant (curpos, last_pos);
2144 }
2145 else if (potential_alignment_gap (prev_old_field, old_field, pos))
2146 {
2147 align = TYPE_ALIGN (field_type);
2148 last_pos = round_up (last_pos, align);
2149 pos = compute_related_constant (curpos, last_pos);
2150 }
2151
2152 /* If we can't compute a position, set it to zero.
2153
2154 ??? We really should abort here, but it's too much work
2155 to get this correct for all cases. */
2156 if (!pos)
2157 pos = bitsize_zero_node;
2158
2159 /* See if this type is variable-sized and make a pointer type
2160 and indicate the indirection if so. Beware that the debug
2161 back-end may adjust the position computed above according
2162 to the alignment of the field type, i.e. the pointer type
2163 in this case, if we don't preventively counter that. */
2164 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
2165 {
2166 field_type = build_pointer_type (field_type);
2167 if (align != 0 && TYPE_ALIGN (field_type) > align)
2168 {
2169 field_type = copy_type (field_type);
2170 SET_TYPE_ALIGN (field_type, align);
2171 }
2172 var = true;
2173 }
2174
2175 /* Make a new field name, if necessary. */
2176 if (var || align != 0)
2177 {
2178 char suffix[16];
2179
2180 if (align != 0)
2181 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
2182 align / BITS_PER_UNIT);
2183 else
2184 strcpy (suffix, "XVL");
2185
2186 field_name = concat_name (field_name, suffix);
2187 }
2188
2189 new_field
2190 = create_field_decl (field_name, field_type, new_record_type,
2191 DECL_SIZE (old_field), pos, 0, 0);
2192 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
2193 TYPE_FIELDS (new_record_type) = new_field;
2194
2195 /* If the type of old_field is a QUAL_UNION_TYPE, take its size as
2196 being zero. The only time it's not the last field of the record
2197 is when there are other components at fixed positions after
2198 it (meaning there was a rep clause for every field) and we
2199 want to be able to encode them. */
2200 last_pos = size_binop (PLUS_EXPR, curpos,
2201 (TREE_CODE (TREE_TYPE (old_field))
2202 == QUAL_UNION_TYPE)
2203 ? bitsize_zero_node
2204 : DECL_SIZE (old_field));
2205 prev_old_field = old_field;
2206 }
2207
2208 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2209
2210 add_parallel_type (record_type, new_record_type);
2211 }
2212 }
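
/* Sketch of the resulting encoding for a hypothetical record R with one
   fixed field F1 and one run-time-sized field F2 (see exp_dbug.ads for
   the full naming scheme): a parallel type R___XVE is built whose first
   field is F1 and whose second field F2___XVL has pointer type, telling
   the debugger that the actual field is variable-sized and must be
   reached through an indirection.  */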
2213
2214 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2215 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2216 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2217 replace a value of zero with the old size. If HAS_REP is true, we take the
2218 MAX of the end position of this field with LAST_SIZE. In all other cases,
2219 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2220
2221 static tree
2222 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2223 bool has_rep)
2224 {
2225 tree type = TREE_TYPE (last_size);
2226 tree new_size;
2227
2228 if (!special || TREE_CODE (size) != COND_EXPR)
2229 {
2230 new_size = size_binop (PLUS_EXPR, first_bit, size);
2231 if (has_rep)
2232 new_size = size_binop (MAX_EXPR, last_size, new_size);
2233 }
2234
2235 else
2236 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2237 integer_zerop (TREE_OPERAND (size, 1))
2238 ? last_size : merge_sizes (last_size, first_bit,
2239 TREE_OPERAND (size, 1),
2240 1, has_rep),
2241 integer_zerop (TREE_OPERAND (size, 2))
2242 ? last_size : merge_sizes (last_size, first_bit,
2243 TREE_OPERAND (size, 2),
2244 1, has_rep));
2245
2246 /* We don't need any NON_LVALUE_EXPRs, and they can confuse us (especially
2247 when fed through substitute_in_expr) into thinking that a constant
2248 size is not constant. */
2249 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2250 new_size = TREE_OPERAND (new_size, 0);
2251
2252 return new_size;
2253 }
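
/* Worked example, with Q a hypothetical discriminant condition: merging
   LAST_SIZE = 64 with a variant field at FIRST_BIT = 64 whose SIZE is
   COND_EXPR (Q, 32, 0) and SPECIAL true yields COND_EXPR (Q, 96, 64),
   i.e. the end of the variant when Q holds and the previous size when
   the variant is empty.  */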
2254
2255 /* Return the bit position of FIELD, in bits from the start of the record,
2256 and fold it as much as possible. This is a tree of type bitsizetype. */
2257
2258 static tree
2259 fold_bit_position (const_tree field)
2260 {
2261 tree offset = DECL_FIELD_OFFSET (field);
2262 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2263 offset = size_binop (TREE_CODE (offset),
2264 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2265 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2266 else
2267 offset = fold_convert (bitsizetype, offset);
2268 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2269 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2270 }
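
/* For example, a field with DECL_FIELD_OFFSET 4 (in bytes) and
   DECL_FIELD_BIT_OFFSET 3 folds to 4 * 8 + 3 = 35 bits on a target with
   8-bit storage units.  */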
2271
2272 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2273 related by the addition of a constant. Return that constant if so. */
2274
2275 static tree
2276 compute_related_constant (tree op0, tree op1)
2277 {
2278 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2279
2280 if (TREE_CODE (op0) == MULT_EXPR
2281 && TREE_CODE (op1) == MULT_EXPR
2282 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2283 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2284 {
2285 factor = TREE_OPERAND (op0, 1);
2286 op0 = TREE_OPERAND (op0, 0);
2287 op1 = TREE_OPERAND (op1, 0);
2288 }
2289 else
2290 factor = NULL_TREE;
2291
2292 op0_cst = split_plus (op0, &op0_var);
2293 op1_cst = split_plus (op1, &op1_var);
2294 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2295
2296 if (operand_equal_p (op0_var, op1_var, 0))
2297 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2298
2299 return NULL_TREE;
2300 }
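
/* For example, if OP0 is N + 16 and OP1 is N for some variable part N,
   the split yields constants 16 and 0 with equal variable parts, so the
   function returns 16; if the variable parts differ, e.g. OP0 is M + 16
   and OP1 is N, it returns NULL_TREE.  */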
2301
2302 /* Utility function of above to split a tree OP which may be a sum, into a
2303 constant part, which is returned, and a variable part, which is stored
2304 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2305 bitsizetype. */
2306
2307 static tree
2308 split_plus (tree in, tree *pvar)
2309 {
2310 /* Strip conversions in order to ease the tree traversal and maximize the
2311 potential for constant or plus/minus discovery. We need to be careful
2312 to always return and set *pvar to bitsizetype trees, but it's worth
2313 the effort. */
2314 in = remove_conversions (in, false);
2315
2316 *pvar = convert (bitsizetype, in);
2317
2318 if (TREE_CODE (in) == INTEGER_CST)
2319 {
2320 *pvar = bitsize_zero_node;
2321 return convert (bitsizetype, in);
2322 }
2323 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2324 {
2325 tree lhs_var, rhs_var;
2326 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2327 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2328
2329 if (lhs_var == TREE_OPERAND (in, 0)
2330 && rhs_var == TREE_OPERAND (in, 1))
2331 return bitsize_zero_node;
2332
2333 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2334 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2335 }
2336 else
2337 return bitsize_zero_node;
2338 }
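
/* For example, splitting IN = (N + 12) - 4 returns the constant 8 and
   sets *PVAR to N: the constant parts 12 and 4 are combined with the
   MINUS_EXPR while the variable part is passed through.  */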
2339 \f
2340 /* Return a copy of TYPE but safe to modify in any way. */
2341
2342 tree
2343 copy_type (tree type)
2344 {
2345 tree new_type = copy_node (type);
2346
2347 /* Unshare the language-specific data. */
2348 if (TYPE_LANG_SPECIFIC (type))
2349 {
2350 TYPE_LANG_SPECIFIC (new_type) = NULL;
2351 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2352 }
2353
2354 /* And the contents of the language-specific slot if needed. */
2355 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2356 && TYPE_RM_VALUES (type))
2357 {
2358 TYPE_RM_VALUES (new_type) = NULL_TREE;
2359 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2360 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2361 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2362 }
2363
2364 /* copy_node clears this field instead of copying it, because it is
2365 aliased with TREE_CHAIN. */
2366 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2367
2368 TYPE_POINTER_TO (new_type) = NULL_TREE;
2369 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2370 TYPE_MAIN_VARIANT (new_type) = new_type;
2371 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2372 TYPE_CANONICAL (new_type) = new_type;
2373
2374 return new_type;
2375 }
2376 \f
2377 /* Return a subtype of sizetype with range MIN to MAX and whose
2378 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2379 of the associated TYPE_DECL. */
2380
2381 tree
2382 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2383 {
2384 /* First build a type for the desired range. */
2385 tree type = build_nonshared_range_type (sizetype, min, max);
2386
2387 /* Then set the index type. */
2388 SET_TYPE_INDEX_TYPE (type, index);
2389 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2390
2391 return type;
2392 }
2393
2394 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2395 sizetype is used. */
2396
2397 tree
2398 create_range_type (tree type, tree min, tree max)
2399 {
2400 tree range_type;
2401
2402 if (!type)
2403 type = sizetype;
2404
2405 /* First build a type with the base range. */
2406 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2407 TYPE_MAX_VALUE (type));
2408
2409 /* Then set the actual range. */
2410 SET_TYPE_RM_MIN_VALUE (range_type, min);
2411 SET_TYPE_RM_MAX_VALUE (range_type, max);
2412
2413 return range_type;
2414 }
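
/* Illustrative sketch, not part of the build: an index subtype with base
   sizetype and RM range 1 .. 10; the base bounds stay those of sizetype
   and only the RM bounds are narrowed.  */
#if 0
tree gnu_index = create_range_type (sizetype, size_int (1), size_int (10));
#endif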
2415 \f
2416 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2417 NAME gives the name of the type to be used in the declaration. */
2418
2419 tree
2420 create_type_stub_decl (tree name, tree type)
2421 {
2422 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2423 DECL_ARTIFICIAL (type_decl) = 1;
2424 TYPE_ARTIFICIAL (type) = 1;
2425 return type_decl;
2426 }
2427
2428 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2429 used in the declaration. ARTIFICIAL_P is true if the declaration was
2430 generated by the compiler. DEBUG_INFO_P is true if we need to write
2431 debug information about this type. GNAT_NODE is used for the position
2432 of the decl. */
2433
2434 tree
2435 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2436 Node_Id gnat_node)
2437 {
2438 enum tree_code code = TREE_CODE (type);
2439 bool is_named
2440 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2441 tree type_decl;
2442
2443 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2444 gcc_assert (!TYPE_IS_DUMMY_P (type));
2445
2446 /* If the type hasn't been named yet, we're naming it; preserve an existing
2447 TYPE_STUB_DECL that has been attached to it for some purpose. */
2448 if (!is_named && TYPE_STUB_DECL (type))
2449 {
2450 type_decl = TYPE_STUB_DECL (type);
2451 DECL_NAME (type_decl) = name;
2452 }
2453 else
2454 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2455
2456 DECL_ARTIFICIAL (type_decl) = artificial_p;
2457 TYPE_ARTIFICIAL (type) = artificial_p;
2458
2459 /* Add this decl to the current binding level. */
2460 gnat_pushdecl (type_decl, gnat_node);
2461
2462 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2463 causes the name to be also viewed as a "tag" by the debug back-end, with
2464 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2465 types in DWARF.
2466
2467 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2468 from multiple contexts, and "type_decl" references a copy of it: in such a
2469 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2470 with the mechanism above. */
2471 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2472 TYPE_STUB_DECL (type) = type_decl;
2473
2474 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2475 back-end doesn't support, and for others if we don't need to. */
2476 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2477 DECL_IGNORED_P (type_decl) = 1;
2478
2479 return type_decl;
2480 }
2481 \f
2482 /* Return a VAR_DECL or CONST_DECL node.
2483
2484 NAME gives the name of the variable. ASM_NAME is its assembler name
2485 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2486 the GCC tree for an optional initial expression; NULL_TREE if none.
2487
2488 CONST_FLAG is true if this variable is constant, in which case we might
2489 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2490
2491 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2492 definition to be made visible outside of the current compilation unit, for
2493 instance variable definitions in a package specification.
2494
2495 EXTERN_FLAG is true when processing an external variable declaration (as
2496 opposed to a definition: no storage is to be allocated for the variable).
2497
2498 STATIC_FLAG is only relevant when not at top level and indicates whether
2499 to always allocate storage to the variable.
2500
2501 VOLATILE_FLAG is true if this variable is declared as volatile.
2502
2503 ARTIFICIAL_P is true if the variable was generated by the compiler.
2504
2505 DEBUG_INFO_P is true if we need to write debug information for it.
2506
2507 ATTR_LIST is the list of attributes to be attached to the variable.
2508
2509 GNAT_NODE is used for the position of the decl. */
2510
2511 tree
2512 create_var_decl (tree name, tree asm_name, tree type, tree init,
2513 bool const_flag, bool public_flag, bool extern_flag,
2514 bool static_flag, bool volatile_flag, bool artificial_p,
2515 bool debug_info_p, struct attrib *attr_list,
2516 Node_Id gnat_node, bool const_decl_allowed_p)
2517 {
2518 /* Whether the object has static storage duration, either explicitly or by
2519 virtue of being declared at the global level. */
2520 const bool static_storage = static_flag || global_bindings_p ();
2521
2522 /* Whether the initializer is constant: for an external object or an object
2523 with static storage duration, we check that the initializer is a valid
2524 constant expression for initializing a static variable; otherwise, we
2525 only check that it is constant. */
2526 const bool init_const
2527 = (init
2528 && gnat_types_compatible_p (type, TREE_TYPE (init))
2529 && (extern_flag || static_storage
2530 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2531 != NULL_TREE
2532 : TREE_CONSTANT (init)));
2533
2534 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2535 case the initializer may be used in lieu of the DECL node (as done in
2536 Identifier_to_gnu). This is useful to prevent the need of elaboration
2537 code when an identifier for which such a DECL is made is in turn used
2538 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2539 but extra constraints apply to this choice (see below) and they are not
2540 relevant to the distinction we wish to make. */
2541 const bool constant_p = const_flag && init_const;
2542
2543 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2544 and may be used for scalars in general but not for aggregates. */
2545 tree var_decl
2546 = build_decl (input_location,
2547 (constant_p
2548 && const_decl_allowed_p
2549 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2550 name, type);
2551
2552 /* Detect constants created by the front-end to hold 'reference to function
2553 calls for stabilization purposes. This is needed for renaming. */
2554 if (const_flag && init && POINTER_TYPE_P (type))
2555 {
2556 tree inner = init;
2557 if (TREE_CODE (inner) == COMPOUND_EXPR)
2558 inner = TREE_OPERAND (inner, 1);
2559 inner = remove_conversions (inner, true);
2560 if (TREE_CODE (inner) == ADDR_EXPR
2561 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2562 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2563 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2564 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2565 DECL_RETURN_VALUE_P (var_decl) = 1;
2566 }
2567
2568 /* If this is external, throw away any initializations (they will be done
2569 elsewhere) unless this is a constant for which we would like to remain
2570 able to get the initializer. If we are defining a global here, leave a
2571 constant initialization and save any variable elaborations for the
2572 elaboration routine. If we are just annotating types, throw away the
2573 initialization if it isn't a constant. */
2574 if ((extern_flag && !constant_p)
2575 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2576 init = NULL_TREE;
2577
2578 /* At the global level, a non-constant initializer generates elaboration
2579 statements. Check that such statements are allowed, that is to say,
2580 not violating a No_Elaboration_Code restriction. */
2581 if (init && !init_const && global_bindings_p ())
2582 Check_Elaboration_Code_Allowed (gnat_node);
2583
2584 /* Attach the initializer, if any. */
2585 DECL_INITIAL (var_decl) = init;
2586
2587 /* Directly set some flags. */
2588 DECL_ARTIFICIAL (var_decl) = artificial_p;
2589 DECL_EXTERNAL (var_decl) = extern_flag;
2590
2591 TREE_CONSTANT (var_decl) = constant_p;
2592 TREE_READONLY (var_decl) = const_flag;
2593
2594 /* The object is public if it is external or if it is declared public
2595 and has static storage duration. */
2596 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2597
2598 /* We need to allocate static storage for an object with static storage
2599 duration if it isn't external. */
2600 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2601
2602 TREE_SIDE_EFFECTS (var_decl)
2603 = TREE_THIS_VOLATILE (var_decl)
2604 = TYPE_VOLATILE (type) | volatile_flag;
2605
2606 if (TREE_SIDE_EFFECTS (var_decl))
2607 TREE_ADDRESSABLE (var_decl) = 1;
2608
2609 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2610 try to fiddle with DECL_COMMON. However, on platforms that don't
2611 support global BSS sections, uninitialized global variables would
2612 go in DATA instead, thus increasing the size of the executable. */
2613 if (!flag_no_common
2614 && TREE_CODE (var_decl) == VAR_DECL
2615 && TREE_PUBLIC (var_decl)
2616 && !have_global_bss_p ())
2617 DECL_COMMON (var_decl) = 1;
2618
2619 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2620 since we will create an associated variable. Likewise for an external
2621 constant whose initializer is not absolute, because this would mean a
2622 global relocation in a read-only section which runs afoul of the PE-COFF
2623 run-time relocation mechanism. */
2624 if (!debug_info_p
2625 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2626 || (extern_flag
2627 && constant_p
2628 && init
2629 && initializer_constant_valid_p (init, TREE_TYPE (init))
2630 != null_pointer_node))
2631 DECL_IGNORED_P (var_decl) = 1;
2632
2633 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2634 if (TREE_CODE (var_decl) == VAR_DECL)
2635 process_attributes (&var_decl, &attr_list, true, gnat_node);
2636
2637 /* Add this decl to the current binding level. */
2638 gnat_pushdecl (var_decl, gnat_node);
2639
2640 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2641 {
2642 /* Let the target mangle the name if this isn't a verbatim asm. */
2643 if (*IDENTIFIER_POINTER (asm_name) != '*')
2644 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2645
2646 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2647 }
2648
2649 return var_decl;
2650 }
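
/* Illustrative sketch with hypothetical names, not part of the build
   (GNAT_NODE stands for the declaration's source node): a public,
   statically allocated, zero-initialized integer variable defined at
   library level, with debug info requested.  */
#if 0
tree gnu_var
  = create_var_decl (get_identifier ("example__var"), NULL_TREE,
		     integer_type_node, integer_zero_node,
		     false,	/* const_flag */
		     true,	/* public_flag */
		     false,	/* extern_flag */
		     true,	/* static_flag */
		     false,	/* volatile_flag */
		     false,	/* artificial_p */
		     true,	/* debug_info_p */
		     NULL, gnat_node);
#endif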
2651 \f
2652 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2653
2654 static bool
2655 aggregate_type_contains_array_p (tree type)
2656 {
2657 switch (TREE_CODE (type))
2658 {
2659 case RECORD_TYPE:
2660 case UNION_TYPE:
2661 case QUAL_UNION_TYPE:
2662 {
2663 tree field;
2664 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2665 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2666 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2667 return true;
2668 return false;
2669 }
2670
2671 case ARRAY_TYPE:
2672 return true;
2673
2674 default:
2675 gcc_unreachable ();
2676 }
2677 }
2678
2679 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2680 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2681 is the specified size of the field. If POS is nonzero, it is the bit
2682 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2683 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2684 means we are allowed to take the address of the field; if it is negative,
2685 we should not make a bitfield, which is used by make_aligning_type. */
2686
2687 tree
2688 create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
2689 int packed, int addressable)
2690 {
2691 tree field_decl = build_decl (input_location, FIELD_DECL, name, type);
2692
2693 DECL_CONTEXT (field_decl) = record_type;
2694 TREE_READONLY (field_decl) = TYPE_READONLY (type);
2695
2696 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2697 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2698 Likewise for an aggregate without specified position that contains an
2699 array, because in this case variable-length slices of this array
2700 must be handled by GCC and variable-sized objects need to be aligned
2701 to at least a byte boundary. */
2702 if (packed && (TYPE_MODE (type) == BLKmode
2703 || (!pos
2704 && AGGREGATE_TYPE_P (type)
2705 && aggregate_type_contains_array_p (type))))
2706 SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);
2707
2708 /* If a size is specified, use it. Otherwise, if the record type is packed
2709 compute a size to use, which may differ from the object's natural size.
2710 We always set a size in this case to trigger the checks for bitfield
2711 creation below, which is typically required when no position has been
2712 specified. */
2713 if (size)
2714 size = convert (bitsizetype, size);
2715 else if (packed == 1)
2716 {
2717 size = rm_size (type);
2718 if (TYPE_MODE (type) == BLKmode)
2719 size = round_up (size, BITS_PER_UNIT);
2720 }
2721
2722 /* If ADDRESSABLE allows it, we make a bitfield when a size is specified,
2723 for two reasons: first, if the size differs from the natural size;
2724 second, if the alignment is insufficient. There are a number of
2725 ways the latter can be true.
2726
2727 We never make a bitfield if the type of the field has a nonconstant size,
2728 because no such entity requiring bitfield operations should reach here.
2729
2730 We do *preventively* make a bitfield when there might be the need for it
2731 but we don't have all the necessary information to decide, as is the case
2732 of a field with no specified position in a packed record.
2733
2734 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2735 in layout_decl or finish_record_type to clear the bit_field indication if
2736 it is in fact not needed. */
2737 if (addressable >= 0
2738 && size
2739 && TREE_CODE (size) == INTEGER_CST
2740 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
2741 && (!tree_int_cst_equal (size, TYPE_SIZE (type))
2742 || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
2743 || packed
2744 || (TYPE_ALIGN (record_type) != 0
2745 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
2746 {
2747 DECL_BIT_FIELD (field_decl) = 1;
2748 DECL_SIZE (field_decl) = size;
2749 if (!packed && !pos)
2750 {
2751 if (TYPE_ALIGN (record_type) != 0
2752 && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
2753 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
2754 else
2755 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2756 }
2757 }
2758
2759 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2760
2761 /* Bump the alignment if need be, either for bitfield/packing purposes or
2762 to satisfy the type requirements if no such consideration applies. When
2763 we get the alignment from the type, indicate if this is from an explicit
2764 user request, which prevents stor-layout from lowering it later on. */
2765 {
2766 unsigned int bit_align
2767 = (DECL_BIT_FIELD (field_decl) ? 1
2768 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);
2769
2770 if (bit_align > DECL_ALIGN (field_decl))
2771 SET_DECL_ALIGN (field_decl, bit_align);
2772 else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
2773 {
2774 SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
2775 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
2776 }
2777 }
2778
2779 if (pos)
2780 {
2781 /* We need to pass in the alignment the DECL is known to have.
2782 This is the lowest-order bit set in POS, but no more than
2783 the alignment of the record, if one is specified. Note
2784 that an alignment of 0 is taken as infinite. */
2785 unsigned int known_align;
2786
2787 if (tree_fits_uhwi_p (pos))
2788 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2789 else
2790 known_align = BITS_PER_UNIT;
2791
2792 if (TYPE_ALIGN (record_type)
2793 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2794 known_align = TYPE_ALIGN (record_type);
2795
2796 layout_decl (field_decl, known_align);
2797 SET_DECL_OFFSET_ALIGN (field_decl,
2798 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2799 : BITS_PER_UNIT);
2800 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2801 &DECL_FIELD_BIT_OFFSET (field_decl),
2802 DECL_OFFSET_ALIGN (field_decl), pos);
2803 }
2804
2805 /* In addition to what our caller says, claim the field is addressable
2806 if we know that its type is not suitable for a nonaliased component.
2807
2808 The field may also be "technically" nonaddressable, meaning that even if
2809 we attempt to take the field's address we will actually get the address
2810 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2811 value we have at this point is not accurate enough, so we don't account
2812 for this here and let finish_record_type decide. */
2813 if (!addressable && !type_for_nonaliased_component_p (type))
2814 addressable = 1;
2815
2816 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2817
2818 return field_decl;
2819 }
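
/* Illustrative sketch, not part of the build: a 5-bit component placed
   at bit 3 of a hypothetical packed record REC; such a field will be
   marked DECL_BIT_FIELD by the checks above since its size differs from
   the natural size of its type.  */
#if 0
tree gnu_field
  = create_field_decl (get_identifier ("F"), integer_type_node, rec,
		       bitsize_int (5), bitsize_int (3), 1, 0);
#endif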
2820 \f
2821 /* Return a PARM_DECL node with NAME and TYPE. */
2822
2823 tree
2824 create_param_decl (tree name, tree type)
2825 {
2826 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2827
2828 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2829 can lead to various ABI violations. */
2830 if (targetm.calls.promote_prototypes (NULL_TREE)
2831 && INTEGRAL_TYPE_P (type)
2832 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2833 {
2834 /* We have to be careful about biased types here. Make a subtype
2835 of integer_type_node with the proper biasing. */
2836 if (TREE_CODE (type) == INTEGER_TYPE
2837 && TYPE_BIASED_REPRESENTATION_P (type))
2838 {
2839 tree subtype
2840 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2841 TREE_TYPE (subtype) = integer_type_node;
2842 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2843 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2844 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2845 type = subtype;
2846 }
2847 else
2848 type = integer_type_node;
2849 }
2850
2851 DECL_ARG_TYPE (param_decl) = type;
2852 return param_decl;
2853 }
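
/* For example, assuming a hypothetical biased subtype with range
   100 .. 103 stored as 0 .. 3: the promoted parameter subtype built
   above keeps TYPE_BIASED_REPRESENTATION_P and the original RM bounds,
   so a stored value of 2 still denotes 102 after promotion.  */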
2854 \f
2855 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2856 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2857 changed. GNAT_NODE is used for the position of error messages. */
2858
2859 void
2860 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2861 Node_Id gnat_node)
2862 {
2863 struct attrib *attr;
2864
2865 for (attr = *attr_list; attr; attr = attr->next)
2866 switch (attr->type)
2867 {
2868 case ATTR_MACHINE_ATTRIBUTE:
2869 Sloc_to_locus (Sloc (gnat_node), &input_location);
2870 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2871 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2872 break;
2873
2874 case ATTR_LINK_ALIAS:
2875 if (!DECL_EXTERNAL (*node))
2876 {
2877 TREE_STATIC (*node) = 1;
2878 assemble_alias (*node, attr->name);
2879 }
2880 break;
2881
2882 case ATTR_WEAK_EXTERNAL:
2883 if (SUPPORTS_WEAK)
2884 declare_weak (*node);
2885 else
2886 post_error ("?weak declarations not supported on this target",
2887 attr->error_point);
2888 break;
2889
2890 case ATTR_LINK_SECTION:
2891 if (targetm_common.have_named_sections)
2892 {
2893 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2894 DECL_COMMON (*node) = 0;
2895 }
2896 else
2897 post_error ("?section attributes are not supported for this target",
2898 attr->error_point);
2899 break;
2900
2901 case ATTR_LINK_CONSTRUCTOR:
2902 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2903 TREE_USED (*node) = 1;
2904 break;
2905
2906 case ATTR_LINK_DESTRUCTOR:
2907 DECL_STATIC_DESTRUCTOR (*node) = 1;
2908 TREE_USED (*node) = 1;
2909 break;
2910
2911 case ATTR_THREAD_LOCAL_STORAGE:
2912 set_decl_tls_model (*node, decl_default_tls_model (*node));
2913 DECL_COMMON (*node) = 0;
2914 break;
2915 }
2916
2917 *attr_list = NULL;
2918 }
2919
2920 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2921 a power of 2. */
2922
2923 bool
2924 value_factor_p (tree value, HOST_WIDE_INT factor)
2925 {
2926 if (tree_fits_uhwi_p (value))
2927 return tree_to_uhwi (value) % factor == 0;
2928
2929 if (TREE_CODE (value) == MULT_EXPR)
2930 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2931 || value_factor_p (TREE_OPERAND (value, 1), factor));
2932
2933 return false;
2934 }
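
/* For example, with VALUE = I * 16 (a MULT_EXPR) and FACTOR = 8, the
   second operand 16 is a compile-time multiple of 8 so the function
   returns true, whatever the value of I; conversely it returns false
   for VALUE = I + 16 since nothing is known about I.  */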
2935
2936 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2937 from the parameter association for the instantiation of a generic. We do
2938 not want to emit source location for them: the code generated for their
2939 initialization is likely to disturb debugging. */
2940
2941 bool
2942 renaming_from_instantiation_p (Node_Id gnat_node)
2943 {
2944 if (Nkind (gnat_node) != N_Defining_Identifier
2945 || !Is_Object (gnat_node)
2946 || Comes_From_Source (gnat_node)
2947 || !Present (Renamed_Object (gnat_node)))
2948 return false;
2949
2950 /* Get the object declaration of the renamed object, if any and if the
2951 renamed object is a mere identifier. */
2952 gnat_node = Renamed_Object (gnat_node);
2953 if (Nkind (gnat_node) != N_Identifier)
2954 return false;
2955
2956 gnat_node = Entity (gnat_node);
2957 if (!Present (Parent (gnat_node)))
2958 return false;
2959
2960 gnat_node = Parent (gnat_node);
2961 return
2962 (Present (gnat_node)
2963 && Nkind (gnat_node) == N_Object_Declaration
2964 && Present (Corresponding_Generic_Association (gnat_node)));
2965 }
2966
2967 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2968 feed it with the elaboration of GNAT_SCOPE. */
2969
2970 static struct deferred_decl_context_node *
2971 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2972 {
2973 struct deferred_decl_context_node *new_node;
2974
2975 new_node
2976 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2977 new_node->decl = decl;
2978 new_node->gnat_scope = gnat_scope;
2979 new_node->force_global = force_global;
2980 new_node->types.create (1);
2981 new_node->next = deferred_decl_context_queue;
2982 deferred_decl_context_queue = new_node;
2983 return new_node;
2984 }
2985
2986 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2987 feed it with the DECL_CONTEXT computed as part of N as soon as it
2988 becomes available. */
2989
2990 static void
2991 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2992 {
2993 n->types.safe_push (type);
2994 }
2995
2996 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2997 NULL_TREE if it is not available. */
2998
2999 static tree
3000 compute_deferred_decl_context (Entity_Id gnat_scope)
3001 {
3002 tree context;
3003
3004 if (present_gnu_tree (gnat_scope))
3005 context = get_gnu_tree (gnat_scope);
3006 else
3007 return NULL_TREE;
3008
3009 if (TREE_CODE (context) == TYPE_DECL)
3010 {
3011 const tree context_type = TREE_TYPE (context);
3012
3013 /* Skip dummy types: only the final ones can appear in the context
3014 chain. */
3015 if (TYPE_DUMMY_P (context_type))
3016 return NULL_TREE;
3017
3018 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
3019 chain. */
3020 else
3021 context = context_type;
3022 }
3023
3024 return context;
3025 }
3026
3027 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
3028 that cannot be processed yet, remove the other ones. If FORCE is true,
3029 force the processing for all nodes, using the global context when nodes
3030 don't have a GNU translation. */
3031
3032 void
3033 process_deferred_decl_context (bool force)
3034 {
3035 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
3036 struct deferred_decl_context_node *node;
3037
3038 while (*it)
3039 {
3040 bool processed = false;
3041 tree context = NULL_TREE;
3042 Entity_Id gnat_scope;
3043
3044 node = *it;
3045
3046 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
3047 get the first scope. */
3048 gnat_scope = node->gnat_scope;
3049 while (Present (gnat_scope))
3050 {
3051 context = compute_deferred_decl_context (gnat_scope);
3052 if (!force || context)
3053 break;
3054 gnat_scope = get_debug_scope (gnat_scope, NULL);
3055 }
3056
3057 /* Imported declarations must not be in a local context (i.e. inside
3058 a function). */
3059 if (context && node->force_global > 0)
3060 {
3061 tree ctx = context;
3062
3063 while (ctx)
3064 {
3065 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
3066 ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
3067 }
3068 }
3069
3070 /* If FORCE, we want to get rid of all nodes in the queue: in case there
3071 was no elaborated scope, use the global context. */
3072 if (force && !context)
3073 context = get_global_context ();
3074
3075 if (context)
3076 {
3077 tree t;
3078 int i;
3079
3080 DECL_CONTEXT (node->decl) = context;
3081
3082 /* Propagate it to the TYPE_CONTEXT attributes of the requested
3083 ..._TYPE nodes. */
3084 FOR_EACH_VEC_ELT (node->types, i, t)
3085 {
3086 gnat_set_type_context (t, context);
3087 }
3088 processed = true;
3089 }
3090
3091 /* If this node has been successfully processed, remove it from the
3092 queue. Then move to the next node. */
3093 if (processed)
3094 {
3095 *it = node->next;
3096 node->types.release ();
3097 free (node);
3098 }
3099 else
3100 it = &node->next;
3101 }
3102 }
3103
3104 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3105
3106 static unsigned int
3107 scale_by_factor_of (tree expr, unsigned int value)
3108 {
3109 unsigned HOST_WIDE_INT addend = 0;
3110 unsigned HOST_WIDE_INT factor = 1;
3111
3112 /* Peel conversions around EXPR and try to extract bodies from function
3113 calls: it is possible to get the scale factor from size functions. */
3114 expr = remove_conversions (expr, true);
3115 if (TREE_CODE (expr) == CALL_EXPR)
3116 expr = maybe_inline_call_in_expr (expr);
3117
3118 /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
3119 multiple of the scale factor we are looking for. */
3120 if (TREE_CODE (expr) == PLUS_EXPR
3121 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
3122 && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
3123 {
3124 addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3125 expr = TREE_OPERAND (expr, 0);
3126 }
3127
3128 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
3129 corresponding to the number of trailing zeros of the mask. */
3130 if (TREE_CODE (expr) == BIT_AND_EXPR
3131 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
3132 {
3133 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
3134 unsigned int i = 0;
3135
3136 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
3137 {
3138 mask >>= 1;
3139 factor *= 2;
3140 i++;
3141 }
3142 }
3143
3144 /* If the addend is not a multiple of the factor we found, give up. In
3145 theory we could find a smaller common factor but it's useless for our
3146 needs. This situation arises when dealing with a field F1 with no
3147 alignment requirement that follows a field F2 with such
3148 requirements. As long as we have F2's offset, we don't need alignment
3149 information to compute F1's. */
3150 if (addend % factor != 0)
3151 factor = 1;
3152
3153 return factor * value;
3154 }
3155
3156 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
3157 unless we can prove these 2 fields are laid out in such a way that no gap
3158 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
3159 is the distance in bits between the end of PREV_FIELD and the starting
3160 position of CURR_FIELD. It is ignored if null. */
3161
3162 static bool
3163 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
3164 {
3165 /* If this is the first field of the record, there cannot be any gap. */
3166 if (!prev_field)
3167 return false;
3168
3169 /* If the previous field is of a qualified union type, then return false:
3170 the only time when such a field is not the last field of the record is
3171 when there are other components at fixed positions after it (meaning there
3172 was a rep clause for every field), in which case we don't want the
3173 alignment constraint to override them. */
3174 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
3175 return false;
3176
3177 /* If the distance between the end of prev_field and the beginning of
3178 curr_field is constant, then there is a gap if the value of this
3179 constant is not null. */
3180 if (offset && tree_fits_uhwi_p (offset))
3181 return !integer_zerop (offset);
3182
3183 /* If the size and position of the previous field are constant,
3184 then check the sum of this size and position. There will be a gap
3185 iff it is not a multiple of the current field alignment. */
3186 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
3187 && tree_fits_uhwi_p (bit_position (prev_field)))
3188 return ((tree_to_uhwi (bit_position (prev_field))
3189 + tree_to_uhwi (DECL_SIZE (prev_field)))
3190 % DECL_ALIGN (curr_field) != 0);
3191
3192 /* If both the position and size of the previous field are multiples
3193 of the current field alignment, there cannot be any gap. */
3194 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
3195 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
3196 return false;
3197
3198 /* Fallback: return that there may be a potential gap. */
3199 return true;
3200 }
3201
3202 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3203 the decl. */
3204
3205 tree
3206 create_label_decl (tree name, Node_Id gnat_node)
3207 {
3208 tree label_decl
3209 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3210
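/* A label denotes no object, hence the VOIDmode.  */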
3211 SET_DECL_MODE (label_decl, VOIDmode);
3212
3213 /* Add this decl to the current binding level. */
3214 gnat_pushdecl (label_decl, gnat_node);
3215
3216 return label_decl;
3217 }
3218 \f
3219 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3220 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3221 the list of its parameters (a list of PARM_DECL nodes chained through the
3222 DECL_CHAIN field).
3223
3224 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3225
3226 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3227 definition to be made visible outside of the current compilation unit.
3228
3229 EXTERN_FLAG is true when processing an external subprogram declaration.
3230
3231 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3232
3233 DEBUG_INFO_P is true if we need to write debug information for it.
3234
3235 DEFINITION is true if the subprogram is to be considered as a definition.
3236
3237 ATTR_LIST is the list of attributes to be attached to the subprogram.
3238
3239 GNAT_NODE is used for the position of the decl. */
3240
3241 tree
3242 create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
3243 enum inline_status_t inline_status, bool public_flag,
3244 bool extern_flag, bool artificial_p, bool debug_info_p,
3245 bool definition, struct attrib *attr_list,
3246 Node_Id gnat_node)
3247 {
3248 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
3249 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3250
3251 DECL_ARTIFICIAL (subprog_decl) = artificial_p;
3252 DECL_EXTERNAL (subprog_decl) = extern_flag;
3253 TREE_PUBLIC (subprog_decl) = public_flag;
3254
3255 if (!debug_info_p)
3256 DECL_IGNORED_P (subprog_decl) = 1;
3257 if (definition)
3258 DECL_FUNCTION_IS_DEF (subprog_decl) = 1;
3259
3260 switch (inline_status)
3261 {
3262 case is_suppressed:
3263 DECL_UNINLINABLE (subprog_decl) = 1;
3264 break;
3265
3266 case is_disabled:
3267 break;
3268
3269 case is_required:
3270 if (Back_End_Inlining)
3271 {
3272 decl_attributes (&subprog_decl,
3273 tree_cons (get_identifier ("always_inline"),
3274 NULL_TREE, NULL_TREE),
3275 ATTR_FLAG_TYPE_IN_PLACE);
3276
3277 /* Inline_Always guarantees that every direct call is inlined and
3278 that there is no indirect reference to the subprogram, so the
3279 instance in the original package (as well as its clones in the
3280 client packages created for inter-unit inlining) can be made
3281 private, which causes the out-of-line body to be eliminated. */
3282 TREE_PUBLIC (subprog_decl) = 0;
3283 }
3284
3285 /* ... fall through ... */
3286
3287 case is_enabled:
3288 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3289 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
3290 break;
3291
3292 default:
3293 gcc_unreachable ();
3294 }
3295
3296 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3297
3298 /* Once everything is processed, finish the subprogram declaration. */
3299 finish_subprog_decl (subprog_decl, asm_name, type);
3300
3301 /* Add this decl to the current binding level. */
3302 gnat_pushdecl (subprog_decl, gnat_node);
3303
3304 /* Output the assembler code and/or RTL for the declaration. */
3305 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3306
3307 return subprog_decl;
3308 }
3309
3310 /* Given a subprogram declaration DECL, its assembler name and its type,
3311 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3312
3313 void
3314 finish_subprog_decl (tree decl, tree asm_name, tree type)
3315 {
3316 tree result_decl
3317 = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
3318 TREE_TYPE (type));
3319
3320 DECL_ARTIFICIAL (result_decl) = 1;
3321 DECL_IGNORED_P (result_decl) = 1;
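/* In gigi, TREE_ADDRESSABLE on a FUNCTION_TYPE means that the result is
returned by invisible reference (see fntype_same_flags_p below).  */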
3322 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
3323 DECL_RESULT (decl) = result_decl;
3324
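/* Propagate the readonly and volatile flags of the type onto the
declaration.  */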
3325 TREE_READONLY (decl) = TYPE_READONLY (type);
3326 TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);
3327
3328 if (asm_name)
3329 {
3330 /* Let the target mangle the name if this isn't a verbatim asm. */
3331 if (*IDENTIFIER_POINTER (asm_name) != '*')
3332 asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);
3333
3334 SET_DECL_ASSEMBLER_NAME (decl, asm_name);
3335
3336 /* The expand_main_function circuitry expects "main_identifier_node" to
3337 designate the DECL_NAME of the 'main' entry point, in turn expected
3338 to be declared as the "main" function literally by default. Ada
3339 program entry points are typically declared with a different name
3340 within the binder-generated file, exported as 'main' to satisfy
3341 system expectations. Force main_identifier_node in this case. */
3342 if (asm_name == main_identifier_node)
3343 DECL_NAME (decl) = main_identifier_node;
3344 }
3345 }
3346 \f
3347 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3348 body. This routine needs to be invoked before processing the declarations
3349 appearing in the subprogram. */
3350
3351 void
3352 begin_subprog_body (tree subprog_decl)
3353 {
3354 tree param_decl;
3355
3356 announce_function (subprog_decl);
3357
3358 /* This function is being defined. */
3359 TREE_STATIC (subprog_decl) = 1;
3360
3361 /* The failure of this assertion will likely come from a wrong context for
3362 the subprogram body, e.g. another procedure for a procedure declared at
3363 library level. */
3364 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3365
3366 current_function_decl = subprog_decl;
3367
3368 /* Enter a new binding level and show that all the parameters belong to
3369 this function. */
3370 gnat_pushlevel ();
3371
3372 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3373 param_decl = DECL_CHAIN (param_decl))
3374 DECL_CONTEXT (param_decl) = subprog_decl;
3375
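/* Create the RTL for the subprogram now, before its body is translated.  */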
3376 make_decl_rtl (subprog_decl);
3377 }
3378
3379 /* Finish translating the current subprogram and set its BODY. */
3380
3381 void
3382 end_subprog_body (tree body)
3383 {
3384 tree fndecl = current_function_decl;
3385
3386 /* Attach the BLOCK for this level to the function and pop the level. */
3387 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3388 DECL_INITIAL (fndecl) = current_binding_level->block;
3389 gnat_poplevel ();
3390
3391 /* Mark the RESULT_DECL as being in this subprogram. */
3392 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3393
3394 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3395 if (TREE_CODE (body) == BIND_EXPR)
3396 {
3397 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3398 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3399 }
3400
3401 DECL_SAVED_TREE (fndecl) = body;
3402
3403 current_function_decl = decl_function_context (fndecl);
3404 }
3405
3406 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3407
3408 void
3409 rest_of_subprog_body_compilation (tree subprog_decl)
3410 {
3411 /* We cannot track the location of errors past this point. */
3412 error_gnat_node = Empty;
3413
3414 /* If we're only annotating types, don't actually compile this function. */
3415 if (type_annotate_only)
3416 return;
3417
3418 /* Dump functions before gimplification. */
3419 dump_function (TDI_original, subprog_decl);
3420
3421 if (!decl_function_context (subprog_decl))
3422 cgraph_node::finalize_function (subprog_decl, false);
3423 else
3424 /* Register this function with cgraph just far enough to get it
3425 added to our parent's nested function list. */
3426 (void) cgraph_node::get_create (subprog_decl);
3427 }
3428
3429 tree
3430 gnat_builtin_function (tree decl)
3431 {
3432 gnat_pushdecl (decl, Empty);
3433 return decl;
3434 }
3435
3436 /* Return an integer type with the number of bits of precision given by
3437 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3438 it is a signed type. */
3439
3440 tree
3441 gnat_type_for_size (unsigned precision, int unsignedp)
3442 {
3443 tree t;
3444 char type_name[20];
3445
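/* Look up the cache of already-created types first.  */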
3446 if (precision <= 2 * MAX_BITS_PER_WORD
3447 && signed_and_unsigned_types[precision][unsignedp])
3448 return signed_and_unsigned_types[precision][unsignedp];
3449
3450 if (unsignedp)
3451 t = make_unsigned_type (precision);
3452 else
3453 t = make_signed_type (precision);
3454 TYPE_ARTIFICIAL (t) = 1;
3455
3456 if (precision <= 2 * MAX_BITS_PER_WORD)
3457 signed_and_unsigned_types[precision][unsignedp] = t;
3458
3459 if (!TYPE_NAME (t))
3460 {
3461 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3462 TYPE_NAME (t) = get_identifier (type_name);
3463 }
3464
3465 return t;
3466 }
3467
3468 /* Likewise for floating-point types. */
3469
3470 static tree
3471 float_type_for_precision (int precision, machine_mode mode)
3472 {
3473 tree t;
3474 char type_name[20];
3475
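/* Reuse the type already created for this machine mode, if any.  */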
3476 if (float_types[(int) mode])
3477 return float_types[(int) mode];
3478
3479 float_types[(int) mode] = t = make_node (REAL_TYPE);
3480 TYPE_PRECISION (t) = precision;
3481 layout_type (t);
3482
3483 gcc_assert (TYPE_MODE (t) == mode);
3484 if (!TYPE_NAME (t))
3485 {
3486 sprintf (type_name, "FLOAT_%d", precision);
3487 TYPE_NAME (t) = get_identifier (type_name);
3488 }
3489
3490 return t;
3491 }
3492
3493 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3494 an unsigned type; otherwise a signed type is returned. */
3495
3496 tree
3497 gnat_type_for_mode (machine_mode mode, int unsignedp)
3498 {
3499 if (mode == BLKmode)
3500 return NULL_TREE;
3501
3502 if (mode == VOIDmode)
3503 return void_type_node;
3504
3505 if (COMPLEX_MODE_P (mode))
3506 return NULL_TREE;
3507
3508 scalar_float_mode float_mode;
3509 if (is_a <scalar_float_mode> (mode, &float_mode))
3510 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3511 float_mode);
3512
3513 scalar_int_mode int_mode;
3514 if (is_a <scalar_int_mode> (mode, &int_mode))
3515 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3516
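/* For a vector mode, build the vector type from the type of its inner
mode.  */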
3517 if (VECTOR_MODE_P (mode))
3518 {
3519 machine_mode inner_mode = GET_MODE_INNER (mode);
3520 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3521 if (inner_type)
3522 return build_vector_type_for_mode (inner_type, mode);
3523 }
3524
3525 return NULL_TREE;
3526 }
3527
3528 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3529 signedness being specified by UNSIGNEDP. */
3530
3531 tree
3532 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3533 {
3534 if (type_node == char_type_node)
3535 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3536
3537 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3538
3539 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3540 {
3541 type = copy_type (type);
3542 TREE_TYPE (type) = type_node;
3543 }
3544 else if (TREE_TYPE (type_node)
3545 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3546 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3547 {
3548 type = copy_type (type);
3549 TREE_TYPE (type) = TREE_TYPE (type_node);
3550 }
3551
3552 return type;
3553 }
3554
3555 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3556 transparently converted to each other. */
3557
3558 int
3559 gnat_types_compatible_p (tree t1, tree t2)
3560 {
3561 enum tree_code code;
3562
3563 /* This is the default criterion. */
3564 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3565 return 1;
3566
3567 /* We only check structural equivalence here. */
3568 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3569 return 0;
3570
3571 /* Vector types are also compatible if they have the same number of subparts
3572 and the same form of (scalar) element type. */
3573 if (code == VECTOR_TYPE
3574 && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
3575 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3576 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3577 return 1;
3578
3579 /* Array types are also compatible if they are constrained and have the same
3580 domain(s), the same component type and the same scalar storage order. */
3581 if (code == ARRAY_TYPE
3582 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3583 || (TYPE_DOMAIN (t1)
3584 && TYPE_DOMAIN (t2)
3585 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3586 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3587 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3588 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3589 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3590 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3591 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3592 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3593 return 1;
3594
3595 return 0;
3596 }
3597
3598 /* Return true if EXPR is a useless type conversion. */
3599
3600 bool
3601 gnat_useless_type_conversion (tree expr)
3602 {
3603 if (CONVERT_EXPR_P (expr)
3604 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3605 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3606 return gnat_types_compatible_p (TREE_TYPE (expr),
3607 TREE_TYPE (TREE_OPERAND (expr, 0)));
3608
3609 return false;
3610 }
3611
3612 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3613
3614 bool
3615 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3616 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3617 {
3618 return TYPE_CI_CO_LIST (t) == cico_list
3619 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3620 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3621 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3622 }
3623 \f
3624 /* EXP is an expression for the size of an object. If this size contains
3625 discriminant references, replace them with the maximum (if MAX_P) or
3626 minimum (if !MAX_P) possible value of the discriminant. */
3627
3628 tree
3629 max_size (tree exp, bool max_p)
3630 {
3631 enum tree_code code = TREE_CODE (exp);
3632 tree type = TREE_TYPE (exp);
3633 tree op0, op1, op2;
3634
3635 switch (TREE_CODE_CLASS (code))
3636 {
3637 case tcc_declaration:
3638 case tcc_constant:
3639 return exp;
3640
3641 case tcc_exceptional:
3642 gcc_assert (code == SSA_NAME);
3643 return exp;
3644
3645 case tcc_vl_exp:
3646 if (code == CALL_EXPR)
3647 {
3648 tree t, *argarray;
3649 int n, i;
3650
3651 t = maybe_inline_call_in_expr (exp);
3652 if (t)
3653 return max_size (t, max_p);
3654
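/* Otherwise, rebuild the call after processing each of its arguments
recursively.  */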
3655 n = call_expr_nargs (exp);
3656 gcc_assert (n > 0);
3657 argarray = XALLOCAVEC (tree, n);
3658 for (i = 0; i < n; i++)
3659 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3660 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3661 }
3662 break;
3663
3664 case tcc_reference:
3665 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3666 modify. Otherwise, we treat it like a variable. */
3667 if (CONTAINS_PLACEHOLDER_P (exp))
3668 {
3669 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3670 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3671 return
3672 convert (type,
3673 max_size (convert (get_base_type (val_type), val), true));
3674 }
3675
3676 return exp;
3677
3678 case tcc_comparison:
3679 return build_int_cst (type, max_p ? 1 : 0);
3680
3681 case tcc_unary:
3682 if (code == NON_LVALUE_EXPR)
3683 return max_size (TREE_OPERAND (exp, 0), max_p);
3684
3685 op0 = max_size (TREE_OPERAND (exp, 0),
3686 code == NEGATE_EXPR ? !max_p : max_p);
3687
3688 if (op0 == TREE_OPERAND (exp, 0))
3689 return exp;
3690
3691 return fold_build1 (code, type, op0);
3692
3693 case tcc_binary:
3694 {
3695 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3696 tree rhs = max_size (TREE_OPERAND (exp, 1),
3697 code == MINUS_EXPR ? !max_p : max_p);
3698
3699 /* Special-case wanting the maximum value of a MIN_EXPR.
3700 In that case, if one side overflows, return the other. */
3701 if (max_p && code == MIN_EXPR)
3702 {
3703 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3704 return lhs;
3705
3706 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3707 return rhs;
3708 }
3709
3710 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3711 overflowing and the RHS a variable. */
3712 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3713 && TREE_CODE (lhs) == INTEGER_CST
3714 && TREE_OVERFLOW (lhs)
3715 && TREE_CODE (rhs) != INTEGER_CST)
3716 return lhs;
3717
3718 /* If we are going to subtract a "negative" value in an unsigned type,
3719 do the operation as an addition of the negated value, in order to
3720 avoid creating a spurious overflow below. */
3721 if (code == MINUS_EXPR
3722 && TYPE_UNSIGNED (type)
3723 && TREE_CODE (rhs) == INTEGER_CST
3724 && !TREE_OVERFLOW (rhs)
3725 && tree_int_cst_sign_bit (rhs) != 0)
3726 {
3727 rhs = fold_build1 (NEGATE_EXPR, type, rhs);
3728 code = PLUS_EXPR;
3729 }
3730
3731 if (lhs == TREE_OPERAND (exp, 0) && rhs == TREE_OPERAND (exp, 1))
3732 return exp;
3733
3734 /* We need to detect overflows so we call size_binop here. */
3735 return size_binop (code, lhs, rhs);
3736 }
3737
3738 case tcc_expression:
3739 switch (TREE_CODE_LENGTH (code))
3740 {
3741 case 1:
3742 if (code == SAVE_EXPR)
3743 return exp;
3744
3745 op0 = max_size (TREE_OPERAND (exp, 0),
3746 code == TRUTH_NOT_EXPR ? !max_p : max_p);
3747
3748 if (op0 == TREE_OPERAND (exp, 0))
3749 return exp;
3750
3751 return fold_build1 (code, type, op0);
3752
3753 case 2:
3754 if (code == COMPOUND_EXPR)
3755 return max_size (TREE_OPERAND (exp, 1), max_p);
3756
3757 op0 = max_size (TREE_OPERAND (exp, 0), max_p);
3758 op1 = max_size (TREE_OPERAND (exp, 1), max_p);
3759
3760 if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
3761 return exp;
3762
3763 return fold_build2 (code, type, op0, op1);
3764
3765 case 3:
3766 if (code == COND_EXPR)
3767 {
3768 op1 = TREE_OPERAND (exp, 1);
3769 op2 = TREE_OPERAND (exp, 2);
3770
3771 if (!op1 || !op2)
3772 return exp;
3773
3774 return
3775 fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3776 max_size (op1, max_p), max_size (op2, max_p));
3777 }
3778 break;
3779
3780 default:
3781 break;
3782 }
3783
3784 /* Other tree classes cannot happen. */
3785 default:
3786 break;
3787 }
3788
3789 gcc_unreachable ();
3790 }
3791 \f
3792 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3793 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3794 Return a constructor for the template. */
3795
3796 tree
3797 build_template (tree template_type, tree array_type, tree expr)
3798 {
3799 vec<constructor_elt, va_gc> *template_elts = NULL;
3800 tree bound_list = NULL_TREE;
3801 tree field;
3802
3803 while (TREE_CODE (array_type) == RECORD_TYPE
3804 && (TYPE_PADDING_P (array_type)
3805 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3806 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3807
3808 if (TREE_CODE (array_type) == ARRAY_TYPE
3809 || (TREE_CODE (array_type) == INTEGER_TYPE
3810 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3811 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3812
3813 /* First make the list for a CONSTRUCTOR for the template. Go down the
3814 field list of the template instead of the type chain because this
3815 array might be an Ada array of arrays and we can't tell where the
3816 nested arrays stop being the underlying object. */
3817
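/* Note that each bound is represented by a pair of consecutive fields in
the template, one for the low bound and one for the high bound, hence the
double DECL_CHAIN stride in the loop below.  */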
3818 for (field = TYPE_FIELDS (template_type); field;
3819 (bound_list
3820 ? (bound_list = TREE_CHAIN (bound_list))
3821 : (array_type = TREE_TYPE (array_type))),
3822 field = DECL_CHAIN (DECL_CHAIN (field)))
3823 {
3824 tree bounds, min, max;
3825
3826 /* If we have a bound list, get the bounds from there. Likewise
3827 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3828 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3829 This will give us a maximum range. */
3830 if (bound_list)
3831 bounds = TREE_VALUE (bound_list);
3832 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3833 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3834 else if (expr && TREE_CODE (expr) == PARM_DECL
3835 && DECL_BY_COMPONENT_PTR_P (expr))
3836 bounds = TREE_TYPE (field);
3837 else
3838 gcc_unreachable ();
3839
3840 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3841 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3842
3843 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
3844 substitute it from EXPR. */
3845 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3846 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3847
3848 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3849 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3850 }
3851
3852 return gnat_build_constructor (template_type, template_elts);
3853 }
3854 \f
3855 /* Return true if TYPE is suitable for the element type of a vector. */
3856
3857 static bool
3858 type_for_vector_element_p (tree type)
3859 {
3860 machine_mode mode;
3861
3862 if (!INTEGRAL_TYPE_P (type)
3863 && !SCALAR_FLOAT_TYPE_P (type)
3864 && !FIXED_POINT_TYPE_P (type))
3865 return false;
3866
3867 mode = TYPE_MODE (type);
3868 if (GET_MODE_CLASS (mode) != MODE_INT
3869 && !SCALAR_FLOAT_MODE_P (mode)
3870 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3871 return false;
3872
3873 return true;
3874 }
3875
3876 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3877 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3878 attribute declaration and want to issue error messages on failure. */
3879
3880 static tree
3881 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3882 {
3883 unsigned HOST_WIDE_INT size_int, inner_size_int;
3884 int nunits;
3885
3886 /* Silently punt on variable sizes. We can't make vector types for them,
3887 we need to ignore them on front-end generated subtypes of unconstrained
3888 base types, and, since this attribute is for binding implementors rather
3889 than end users, we should never get there from legitimate explicit uses. */
3890 if (!tree_fits_uhwi_p (size))
3891 return NULL_TREE;
3892 size_int = tree_to_uhwi (size);
3893
3894 if (!type_for_vector_element_p (inner_type))
3895 {
3896 if (attribute)
3897 error ("invalid element type for attribute %qs",
3898 IDENTIFIER_POINTER (attribute));
3899 return NULL_TREE;
3900 }
3901 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3902
3903 if (size_int % inner_size_int)
3904 {
3905 if (attribute)
3906 error ("vector size not an integral multiple of component size");
3907 return NULL_TREE;
3908 }
3909
3910 if (size_int == 0)
3911 {
3912 if (attribute)
3913 error ("zero vector size");
3914 return NULL_TREE;
3915 }
3916
3917 nunits = size_int / inner_size_int;
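/* NUNITS is a power of 2 iff NUNITS & (NUNITS - 1) is zero, since a
power of 2 has a single bit set.  */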
3918 if (nunits & (nunits - 1))
3919 {
3920 if (attribute)
3921 error ("number of components of vector not a power of two");
3922 return NULL_TREE;
3923 }
3924
3925 return build_vector_type (inner_type, nunits);
3926 }
3927
3928 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3929 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3930 processing the attribute and want to issue error messages on failure. */
3931
3932 static tree
3933 build_vector_type_for_array (tree array_type, tree attribute)
3934 {
3935 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3936 TYPE_SIZE_UNIT (array_type),
3937 attribute);
3938 if (!vector_type)
3939 return NULL_TREE;
3940
3941 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3942 return vector_type;
3943 }
3944 \f
3945 /* Build a type to be used to represent an aliased object whose nominal type
3946 is an unconstrained array. This consists of a RECORD_TYPE containing a
3947 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3948 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3949 an arbitrary unconstrained object. Use NAME as the name of the record.
3950 DEBUG_INFO_P is true if we need to write debug information for the type. */
3951
3952 tree
3953 build_unc_object_type (tree template_type, tree object_type, tree name,
3954 bool debug_info_p)
3955 {
3956 tree decl;
3957 tree type = make_node (RECORD_TYPE);
3958 tree template_field
3959 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3960 NULL_TREE, NULL_TREE, 0, 1);
3961 tree array_field
3962 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3963 NULL_TREE, NULL_TREE, 0, 1);
3964
3965 TYPE_NAME (type) = name;
3966 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3967 DECL_CHAIN (template_field) = array_field;
3968 finish_record_type (type, template_field, 0, true);
3969
3970 /* Declare it now since it will never be declared otherwise. This is
3971 necessary to ensure that its subtrees are properly marked. */
3972 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3973
3974 /* template_type will not be used anywhere else, so to keep the debug
3975 info clean and in order to avoid scoping issues, make decl its
3976 context. */
3977 gnat_set_type_context (template_type, decl);
3978
3979 return type;
3980 }
3981
3982 /* Same, taking a thin or fat pointer type instead of a template type. */
3983
3984 tree
3985 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3986 tree name, bool debug_info_p)
3987 {
3988 tree template_type;
3989
3990 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3991
3992 template_type
3993 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3994 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3995 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3996
3997 return
3998 build_unc_object_type (template_type, object_type, name, debug_info_p);
3999 }
4000 \f
4001 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4002 In the normal case this is just two adjustments, but we have more to
4003 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4004
4005 void
4006 update_pointer_to (tree old_type, tree new_type)
4007 {
4008 tree ptr = TYPE_POINTER_TO (old_type);
4009 tree ref = TYPE_REFERENCE_TO (old_type);
4010 tree t;
4011
4012 /* If this is the main variant, process all the other variants first. */
4013 if (TYPE_MAIN_VARIANT (old_type) == old_type)
4014 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
4015 update_pointer_to (t, new_type);
4016
4017 /* If no pointers and no references, we are done. */
4018 if (!ptr && !ref)
4019 return;
4020
4021 /* Merge the old type qualifiers in the new type.
4022
4023 Each old variant has qualifiers for specific reasons, and the new
4024 designated type as well. Each set of qualifiers represents useful
4025 information grabbed at some point, and merging the two simply unifies
4026 these inputs into the final type description.
4027
4028 Consider for instance a volatile type frozen after an access to constant
4029 type designating it; after the designated type's freeze, we get here with
4030 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4031 when the access type was processed. We will make a volatile and readonly
4032 designated type, because that's what it really is.
4033
4034 We might also get here for a non-dummy OLD_TYPE variant with different
4035 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4036 to private record type elaboration (see the comments around the call to
4037 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4038 the qualifiers in those cases too, to avoid accidentally discarding the
4039 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4040 new_type
4041 = build_qualified_type (new_type,
4042 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
4043
4044 /* If old type and new type are identical, there is nothing to do. */
4045 if (old_type == new_type)
4046 return;
4047
4048 /* Otherwise, first handle the simple case. */
4049 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
4050 {
4051 tree new_ptr, new_ref;
4052
4053 /* If pointer or reference already points to new type, nothing to do.
4054 This can happen as update_pointer_to can be invoked multiple times
4055 on the same pair of types because of the type variants. */
4056 if ((ptr && TREE_TYPE (ptr) == new_type)
4057 || (ref && TREE_TYPE (ref) == new_type))
4058 return;
4059
4060 /* Chain PTR and its variants at the end. */
4061 new_ptr = TYPE_POINTER_TO (new_type);
4062 if (new_ptr)
4063 {
4064 while (TYPE_NEXT_PTR_TO (new_ptr))
4065 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
4066 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
4067 }
4068 else
4069 TYPE_POINTER_TO (new_type) = ptr;
4070
4071 /* Now adjust them. */
4072 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
4073 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
4074 {
4075 TREE_TYPE (t) = new_type;
4076 if (TYPE_NULL_BOUNDS (t))
4077 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
4078 }
4079
4080 /* Chain REF and its variants at the end. */
4081 new_ref = TYPE_REFERENCE_TO (new_type);
4082 if (new_ref)
4083 {
4084 while (TYPE_NEXT_REF_TO (new_ref))
4085 new_ref = TYPE_NEXT_REF_TO (new_ref);
4086 TYPE_NEXT_REF_TO (new_ref) = ref;
4087 }
4088 else
4089 TYPE_REFERENCE_TO (new_type) = ref;
4090
4091 /* Now adjust them. */
4092 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
4093 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
4094 TREE_TYPE (t) = new_type;
4095
4096 TYPE_POINTER_TO (old_type) = NULL_TREE;
4097 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4098 }
4099
4100 /* Now deal with the unconstrained array case. In this case the pointer
4101 is actually a record where both fields are pointers to dummy nodes.
4102 Turn them into pointers to the correct types using update_pointer_to.
4103 Likewise for the pointer to the object record (thin pointer). */
4104 else
4105 {
4106 tree new_ptr = TYPE_POINTER_TO (new_type);
4107
4108 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
4109
4110 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4111 since update_pointer_to can be invoked multiple times on the same
4112 pair of types because of the type variants. */
4113 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
4114 return;
4115
4116 update_pointer_to
4117 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
4118 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
4119
4120 update_pointer_to
4121 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
4122 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
4123
4124 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
4125 TYPE_OBJECT_RECORD_TYPE (new_type));
4126
4127 TYPE_POINTER_TO (old_type) = NULL_TREE;
4128 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
4129 }
4130 }
4131 \f
4132 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4133 unconstrained one. This involves making or finding a template. */
4134
4135 static tree
4136 convert_to_fat_pointer (tree type, tree expr)
4137 {
4138 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
4139 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
4140 tree etype = TREE_TYPE (expr);
4141 tree template_addr;
4142 vec<constructor_elt, va_gc> *v;
4143 vec_alloc (v, 2);
4144
4145 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4146 array (compare_fat_pointers ensures that this is the full discriminant)
4147 and a valid pointer to the bounds. This latter property is necessary
4148 since the compiler can hoist the load of the bounds done through it. */
4149 if (integer_zerop (expr))
4150 {
4151 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4152 tree null_bounds, t;
4153
4154 if (TYPE_NULL_BOUNDS (ptr_template_type))
4155 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
4156 else
4157 {
4158 /* The template type can still be dummy at this point so we build an
4159 empty constructor. The middle-end will fill it in with zeros. */
4160 t = build_constructor (template_type, NULL);
4161 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
4162 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
4163 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
4164 }
4165
4166 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4167 fold_convert (p_array_type, null_pointer_node));
4168 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
4169 t = build_constructor (type, v);
4170 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4171 TREE_CONSTANT (t) = 0;
4172 TREE_STATIC (t) = 1;
4173
4174 return t;
4175 }
4176
4177 /* If EXPR is a thin pointer, make template and data from the record. */
4178 if (TYPE_IS_THIN_POINTER_P (etype))
4179 {
4180 tree field = TYPE_FIELDS (TREE_TYPE (etype));
4181
4182 expr = gnat_protect_expr (expr);
4183
4184 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4185 the thin pointer value has been shifted so we shift it back to get
4186 the template address. */
4187 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
4188 {
4189 template_addr
4190 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
4191 fold_build1 (NEGATE_EXPR, sizetype,
4192 byte_position
4193 (DECL_CHAIN (field))));
4194 template_addr
4195 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
4196 template_addr);
4197 }
4198
4199 /* Otherwise we explicitly take the address of the fields. */
4200 else
4201 {
4202 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
4203 template_addr
4204 = build_unary_op (ADDR_EXPR, NULL_TREE,
4205 build_component_ref (expr, field, false));
4206 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
4207 build_component_ref (expr, DECL_CHAIN (field),
4208 false));
4209 }
4210 }
4211
4212 /* Otherwise, build the constructor for the template. */
4213 else
4214 template_addr
4215 = build_unary_op (ADDR_EXPR, NULL_TREE,
4216 build_template (template_type, TREE_TYPE (etype),
4217 expr));
4218
4219 /* The final result is a constructor for the fat pointer.
4220
4221 If EXPR is an argument of a foreign convention subprogram, the type it
4222 points to is directly the component type. In this case, the expression
4223 type may not match the corresponding FIELD_DECL type at this point, so we
4224 call "convert" here to fix that up if necessary. This type consistency is
4225 required, for instance because it ensures that possible later folding of
4226 COMPONENT_REFs against this constructor always yields something of the
4227 same type as the initial reference.
4228
4229 Note that the call to "build_template" above is still fine because it
4230 will only refer to the provided TEMPLATE_TYPE in this case. */
4231 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
4232 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
4233 return gnat_build_constructor (type, v);
4234 }
4235 \f
4236 /* Create an expression whose value is that of EXPR,
4237 converted to type TYPE. The TREE_TYPE of the value
4238 is always TYPE. This function implements all reasonable
4239 conversions; callers should filter out those that are
4240 not permitted by the language being compiled. */
4241
4242 tree
4243 convert (tree type, tree expr)
4244 {
4245 tree etype = TREE_TYPE (expr);
4246 enum tree_code ecode = TREE_CODE (etype);
4247 enum tree_code code = TREE_CODE (type);
4248
4249 /* If the expression is already of the right type, we are done. */
4250 if (etype == type)
4251 return expr;
4252
4253 /* If both input and output have padding and are of variable size, do this
4254 as an unchecked conversion. Likewise if one is a mere variant of the
4255 other, so we avoid a pointless unpad/repad sequence. */
4256 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4257 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4258 && (!TREE_CONSTANT (TYPE_SIZE (type))
4259 || !TREE_CONSTANT (TYPE_SIZE (etype))
4260 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4261 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4262 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4263 ;
4264
4265 /* If the output type has padding, convert to the inner type and make a
4266 constructor to build the record, unless a variable size is involved. */
4267 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4268 {
4269 /* If we previously converted from another type and our type is
4270 of variable size, remove the conversion to avoid the need for
4271 variable-sized temporaries. Likewise for a conversion between
4272 original and packable version. */
4273 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4274 && (!TREE_CONSTANT (TYPE_SIZE (type))
4275 || (ecode == RECORD_TYPE
4276 && TYPE_NAME (etype)
4277 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4278 expr = TREE_OPERAND (expr, 0);
4279
4280 /* If we are just removing the padding from expr, convert the original
4281 object if we have variable size in order to avoid the need for some
4282 variable-sized temporaries. Likewise if the padding is a variant
4283 of the other, so we avoid a pointless unpad/repad sequence. */
4284 if (TREE_CODE (expr) == COMPONENT_REF
4285 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4286 && (!TREE_CONSTANT (TYPE_SIZE (type))
4287 || TYPE_MAIN_VARIANT (type)
4288 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4289 || (ecode == RECORD_TYPE
4290 && TYPE_NAME (etype)
4291 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4292 return convert (type, TREE_OPERAND (expr, 0));
4293
4294 /* If the inner type is of self-referential size and the expression type
4295 is a record, do this as an unchecked conversion unless both types are
4296 essentially the same. But first pad the expression if possible to
4297 have the same size on both sides. */
4298 if (ecode == RECORD_TYPE
4299 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4300 && TYPE_MAIN_VARIANT (etype)
4301 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4302 {
4303 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4304 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4305 false, false, false, true),
4306 expr);
4307 return unchecked_convert (type, expr, false);
4308 }
4309
4310 /* If we are converting between array types with variable size, do the
4311 final conversion as an unchecked conversion, again to avoid the need
4312 for some variable-sized temporaries. If valid, this conversion is
4313 very likely purely technical and without real effects. */
4314 if (ecode == ARRAY_TYPE
4315 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4316 && !TREE_CONSTANT (TYPE_SIZE (etype))
4317 && !TREE_CONSTANT (TYPE_SIZE (type)))
4318 return unchecked_convert (type,
4319 convert (TREE_TYPE (TYPE_FIELDS (type)),
4320 expr),
4321 false);
4322
4323 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4324
4325 /* If converting to the inner type has already created a CONSTRUCTOR with
4326 the right size, then reuse it instead of creating another one. This
4327 can happen for the padding type built to overalign local variables. */
4328 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4329 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4330 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4331 && tree_int_cst_equal (TYPE_SIZE (type),
4332 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4333 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4334
4335 vec<constructor_elt, va_gc> *v;
4336 vec_alloc (v, 1);
4337 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4338 return gnat_build_constructor (type, v);
4339 }
4340
4341 /* If the input type has padding, remove it and convert to the output type.
4342 The ordering of the conditions is arranged to ensure that the output type
4343 is not a padding type here, as it is not clear whether the conversion
4344 would always be correct if this were to happen. */
4345 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4346 {
4347 tree unpadded;
4348
4349 /* If we have just converted to this padded type, just get the
4350 inner expression. */
4351 if (TREE_CODE (expr) == CONSTRUCTOR)
4352 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4353
4354 /* Otherwise, build an explicit component reference. */
4355 else
4356 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4357
4358 return convert (type, unpadded);
4359 }
4360
4361 /* If the input is a biased type, convert first to the base type and add
4362 the bias. Note that the bias must go through a full conversion to the
4363 base type, lest it be itself a biased value; this happens for subtypes
4364 of biased types. */
4365 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4366 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4367 fold_convert (TREE_TYPE (etype), expr),
4368 convert (TREE_TYPE (etype),
4369 TYPE_MIN_VALUE (etype))));
4370
4371 /* If the input is a justified modular type, we need to extract the actual
4372 object before converting it to any other type with the exceptions of an
4373 unconstrained array or of a mere type variant. It is useful to avoid the
4374 extraction and conversion in the type variant case because it could end
4375 up replacing a VAR_DECL expr by a constructor and we might be about to
4376 take the address of the result. */
4377 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4378 && code != UNCONSTRAINED_ARRAY_TYPE
4379 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4380 return
4381 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4382
4383 /* If converting to a type that contains a template, convert to the data
4384 type and then build the template. */
4385 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4386 {
4387 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4388 vec<constructor_elt, va_gc> *v;
4389 vec_alloc (v, 2);
4390
4391 /* If the source already has a template, get a reference to the
4392 associated array only, as we are going to rebuild a template
4393 for the target type anyway. */
4394 expr = maybe_unconstrained_array (expr);
4395
4396 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4397 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4398 obj_type, NULL_TREE));
4399 if (expr)
4400 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4401 convert (obj_type, expr));
4402 return gnat_build_constructor (type, v);
4403 }
4404
4405 /* There are some cases of expressions that we process specially. */
4406 switch (TREE_CODE (expr))
4407 {
4408 case ERROR_MARK:
4409 return expr;
4410
4411 case NULL_EXPR:
4412 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4413 conversion in gnat_expand_expr. NULL_EXPR does not represent
4414 an actual value, so no conversion is needed. */
4415 expr = copy_node (expr);
4416 TREE_TYPE (expr) = type;
4417 return expr;
4418
4419 case STRING_CST:
4420 /* If we are converting a STRING_CST to another constrained array type,
4421 just make a new one in the proper type. */
4422 if (code == ecode && AGGREGATE_TYPE_P (etype)
4423 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4424 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4425 {
4426 expr = copy_node (expr);
4427 TREE_TYPE (expr) = type;
4428 return expr;
4429 }
4430 break;
4431
4432 case VECTOR_CST:
4433 /* If we are converting a VECTOR_CST to a mere type variant, just make
4434 a new one in the proper type. */
4435 if (code == ecode && gnat_types_compatible_p (type, etype))
4436 {
4437 expr = copy_node (expr);
4438 TREE_TYPE (expr) = type;
4439 return expr;
4440 }
4441 break;
4442
4443 case CONSTRUCTOR:
4444 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4445 another padding type around the same type, just make a new one in
4446 the proper type. */
4447 if (code == ecode
4448 && (gnat_types_compatible_p (type, etype)
4449 || (code == RECORD_TYPE
4450 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4451 && TREE_TYPE (TYPE_FIELDS (type))
4452 == TREE_TYPE (TYPE_FIELDS (etype)))))
4453 {
4454 expr = copy_node (expr);
4455 TREE_TYPE (expr) = type;
4456 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4457 return expr;
4458 }
4459
4460 /* Likewise for a conversion between original and packable version, or
4461 conversion between types of the same size and with the same list of
4462 fields, but we have to work harder to preserve type consistency. */
4463 if (code == ecode
4464 && code == RECORD_TYPE
4465 && (TYPE_NAME (type) == TYPE_NAME (etype)
4466 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4468 {
4469 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4470 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4471 vec<constructor_elt, va_gc> *v;
4472 vec_alloc (v, len);
4473 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4474 unsigned HOST_WIDE_INT idx;
4475 tree index, value;
4476
4477 /* Whether we need to clear TREE_CONSTANT et al. on the output
4478 constructor when we convert in place. */
4479 bool clear_constant = false;
4480
4481 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4482 {
4483 /* Skip the missing fields in the CONSTRUCTOR. */
4484 while (efield && field && !SAME_FIELD_P (efield, index))
4485 {
4486 efield = DECL_CHAIN (efield);
4487 field = DECL_CHAIN (field);
4488 }
4489 /* The field must be the same. */
4490 if (!(efield && field && SAME_FIELD_P (efield, field)))
4491 break;
4492 constructor_elt elt
4493 = {field, convert (TREE_TYPE (field), value)};
4494 v->quick_push (elt);
4495
4496 /* If packing has made this field a bitfield and the input
4497 value couldn't be emitted statically any more, we need to
4498 clear TREE_CONSTANT on our output. */
4499 if (!clear_constant
4500 && TREE_CONSTANT (expr)
4501 && !CONSTRUCTOR_BITFIELD_P (efield)
4502 && CONSTRUCTOR_BITFIELD_P (field)
4503 && !initializer_constant_valid_for_bitfield_p (value))
4504 clear_constant = true;
4505
4506 efield = DECL_CHAIN (efield);
4507 field = DECL_CHAIN (field);
4508 }
4509
4510 /* If we have been able to match and convert all the input fields
4511 to their output type, convert in place now. We'll fall back to a
4512 view conversion downstream otherwise. */
4513 if (idx == len)
4514 {
4515 expr = copy_node (expr);
4516 TREE_TYPE (expr) = type;
4517 CONSTRUCTOR_ELTS (expr) = v;
4518 if (clear_constant)
4519 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4520 return expr;
4521 }
4522 }
4523
4524 /* Likewise for a conversion between array type and vector type with a
4525 compatible representative array. */
4526 else if (code == VECTOR_TYPE
4527 && ecode == ARRAY_TYPE
4528 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4529 etype))
4530 {
4531 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4532 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4533 vec<constructor_elt, va_gc> *v;
4534 unsigned HOST_WIDE_INT ix;
4535 tree value;
4536
4537 /* Build a VECTOR_CST from a *constant* array constructor. */
4538 if (TREE_CONSTANT (expr))
4539 {
4540 bool constant_p = true;
4541
4542 /* Iterate through elements and check if all constructor
4543 elements are *_CSTs. */
4544 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4545 if (!CONSTANT_CLASS_P (value))
4546 {
4547 constant_p = false;
4548 break;
4549 }
4550
4551 if (constant_p)
4552 return build_vector_from_ctor (type,
4553 CONSTRUCTOR_ELTS (expr));
4554 }
4555
4556 /* Otherwise, build a regular vector constructor. */
4557 vec_alloc (v, len);
4558 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4559 {
4560 constructor_elt elt = {NULL_TREE, value};
4561 v->quick_push (elt);
4562 }
4563 expr = copy_node (expr);
4564 TREE_TYPE (expr) = type;
4565 CONSTRUCTOR_ELTS (expr) = v;
4566 return expr;
4567 }
4568 break;
4569
4570 case UNCONSTRAINED_ARRAY_REF:
4571 /* First retrieve the underlying array. */
4572 expr = maybe_unconstrained_array (expr);
4573 etype = TREE_TYPE (expr);
4574 ecode = TREE_CODE (etype);
4575 break;
4576
4577 case VIEW_CONVERT_EXPR:
4578 {
4579 /* GCC 4.x is very sensitive to type consistency overall, and view
4580 conversions thus are very frequent. Even though just "convert"ing
4581 the inner operand to the output type is fine in most cases, it
4582 might expose unexpected input/output type mismatches in special
4583 circumstances so we avoid such recursive calls when we can. */
4584 tree op0 = TREE_OPERAND (expr, 0);
4585
4586 /* If we are converting back to the original type, we can just
4587 lift the input conversion. This is a common occurrence with
4588 switches back-and-forth amongst type variants. */
4589 if (type == TREE_TYPE (op0))
4590 return op0;
4591
4592 /* Otherwise, if we're converting between two aggregate or vector
4593 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4594 target type in place or to just convert the inner expression. */
4595 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4596 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4597 {
4598 /* If we are converting between mere variants, we can just
4599 substitute the VIEW_CONVERT_EXPR in place. */
4600 if (gnat_types_compatible_p (type, etype))
4601 return build1 (VIEW_CONVERT_EXPR, type, op0);
4602
4603 /* Otherwise, we may just bypass the input view conversion unless
4604 one of the types is a fat pointer, which is handled by
4605 specialized code below which relies on exact type matching. */
4606 else if (!TYPE_IS_FAT_POINTER_P (type)
4607 && !TYPE_IS_FAT_POINTER_P (etype))
4608 return convert (type, op0);
4609 }
4610
4611 break;
4612 }
4613
4614 default:
4615 break;
4616 }
4617
4618 /* Check for converting to a pointer to an unconstrained array. */
4619 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4620 return convert_to_fat_pointer (type, expr);
4621
4622 /* If we are converting between two aggregate or vector types that are mere
4623 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4624 to a vector type from its representative array type. */
4625 else if ((code == ecode
4626 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4627 && gnat_types_compatible_p (type, etype))
4628 || (code == VECTOR_TYPE
4629 && ecode == ARRAY_TYPE
4630 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4631 etype)))
4632 return build1 (VIEW_CONVERT_EXPR, type, expr);
4633
4634 /* If we are converting between tagged types, try to upcast properly.
4635 But don't do it if we are just annotating types since tagged types
4636 aren't fully laid out in this mode. */
4637 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4638 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type)
4639 && !type_annotate_only)
4640 {
4641 tree child_etype = etype;
4642 do {
4643 tree field = TYPE_FIELDS (child_etype);
4644 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4645 return build_component_ref (expr, field, false);
4646 child_etype = TREE_TYPE (field);
4647 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4648 }
4649
4650 /* If we are converting from a smaller form of record type back to it, just
4651 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4652 size on both sides. */
4653 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4654 && smaller_form_type_p (etype, type))
4655 {
4656 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4657 false, false, false, true),
4658 expr);
4659 return build1 (VIEW_CONVERT_EXPR, type, expr);
4660 }
4661
4662 /* In all other cases of related types, make a NOP_EXPR. */
4663 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4664 return fold_convert (type, expr);
4665
4666 switch (code)
4667 {
4668 case VOID_TYPE:
4669 return fold_build1 (CONVERT_EXPR, type, expr);
4670
4671 case INTEGER_TYPE:
4672 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4673 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4674 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4675 return unchecked_convert (type, expr, false);
4676
4677 /* If the output is a biased type, convert first to the base type and
4678 subtract the bias. Note that the bias itself must go through a full
4679 conversion to the base type, lest it is a biased value; this happens
4680 for subtypes of biased types. */
4681 if (TYPE_BIASED_REPRESENTATION_P (type))
4682 return fold_convert (type,
4683 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4684 convert (TREE_TYPE (type), expr),
4685 convert (TREE_TYPE (type),
4686 TYPE_MIN_VALUE (type))));
4687
4688 /* ... fall through ... */
4689
4690 case ENUMERAL_TYPE:
4691 case BOOLEAN_TYPE:
4692 /* If we are converting an additive expression to an integer type
4693 with lower precision, be wary of the optimization that can be
4694 applied by convert_to_integer. There are 2 problematic cases:
4695 - if the first operand was originally of a biased type,
4696 because we could be recursively called to convert it
4697 to an intermediate type and thus rematerialize the
4698 additive operator endlessly,
4699 - if the expression contains a placeholder, because an
4700 intermediate conversion that changes the sign could
4701 be inserted and thus introduce an artificial overflow
4702 at compile time when the placeholder is substituted. */
4703 if (code == INTEGER_TYPE
4704 && ecode == INTEGER_TYPE
4705 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4706 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4707 {
4708 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4709
4710 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4711 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4712 || CONTAINS_PLACEHOLDER_P (expr))
4713 return build1 (NOP_EXPR, type, expr);
4714 }
4715
4716 return fold (convert_to_integer (type, expr));
4717
4718 case POINTER_TYPE:
4719 case REFERENCE_TYPE:
4720 /* If converting between two thin pointers, adjust if needed to account
4721 for differing offsets from the base pointer, depending on whether
4722 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4723 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4724 {
4725 tree etype_pos
4726 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4727 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4728 : size_zero_node;
4729 tree type_pos
4730 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4731 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4732 : size_zero_node;
4733 tree byte_diff = size_diffop (type_pos, etype_pos);
4734
4735 expr = build1 (NOP_EXPR, type, expr);
4736 if (integer_zerop (byte_diff))
4737 return expr;
4738
4739 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4740 fold_convert (sizetype, byte_diff));
4741 }
4742
4743 /* If converting fat pointer to normal or thin pointer, get the pointer
4744 to the array and then convert it. */
4745 if (TYPE_IS_FAT_POINTER_P (etype))
4746 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4747
4748 return fold (convert_to_pointer (type, expr));
4749
4750 case REAL_TYPE:
4751 return fold (convert_to_real (type, expr));
4752
4753 case RECORD_TYPE:
4754 /* Do a normal conversion between scalar and justified modular type. */
4755 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4756 {
4757 vec<constructor_elt, va_gc> *v;
4758 vec_alloc (v, 1);
4759
4760 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4761 convert (TREE_TYPE (TYPE_FIELDS (type)),
4762 expr));
4763 return gnat_build_constructor (type, v);
4764 }
4765
4766 /* In these cases, assume the front-end has validated the conversion.
4767 If the conversion is valid, it will be a bit-wise conversion, so
4768 it can be viewed as an unchecked conversion. */
4769 return unchecked_convert (type, expr, false);
4770
4771 case ARRAY_TYPE:
4772 /* Do a normal conversion between unconstrained and constrained array
4773 type, assuming the latter is a constrained version of the former. */
4774 if (TREE_CODE (expr) == INDIRECT_REF
4775 && ecode == ARRAY_TYPE
4776 && TREE_TYPE (etype) == TREE_TYPE (type))
4777 {
4778 tree ptr_type = build_pointer_type (type);
4779 tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
4780 fold_convert (ptr_type,
4781 TREE_OPERAND (expr, 0)));
4782 TREE_READONLY (t) = TREE_READONLY (expr);
4783 TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
4784 return t;
4785 }
4786
4787 /* In these cases, assume the front-end has validated the conversion.
4788 If the conversion is valid, it will be a bit-wise conversion, so
4789 it can be viewed as an unchecked conversion. */
4790 return unchecked_convert (type, expr, false);
4791
4792 case UNION_TYPE:
4793 /* This is either a conversion between a tagged type and some
4794 subtype, which we have to mark as a UNION_TYPE because of
4795 overlapping fields, or a conversion of an Unchecked_Union. */
4796 return unchecked_convert (type, expr, false);
4797
4798 case UNCONSTRAINED_ARRAY_TYPE:
4799 /* If the input is a VECTOR_TYPE, convert to the representative
4800 array type first. */
4801 if (ecode == VECTOR_TYPE)
4802 {
4803 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4804 etype = TREE_TYPE (expr);
4805 ecode = TREE_CODE (etype);
4806 }
4807
4808 /* If EXPR is a constrained array, take its address, convert it to a
4809 fat pointer, and then dereference it. Likewise if EXPR is a
4810 record containing both a template and a constrained array.
4811 Note that a record representing a justified modular type
4812 always represents a packed constrained array. */
4813 if (ecode == ARRAY_TYPE
4814 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4815 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4816 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4817 return
4818 build_unary_op
4819 (INDIRECT_REF, NULL_TREE,
4820 convert_to_fat_pointer (TREE_TYPE (type),
4821 build_unary_op (ADDR_EXPR,
4822 NULL_TREE, expr)));
4823
4824 /* Do something very similar for converting one unconstrained
4825 array to another. */
4826 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4827 return
4828 build_unary_op (INDIRECT_REF, NULL_TREE,
4829 convert (TREE_TYPE (type),
4830 build_unary_op (ADDR_EXPR,
4831 NULL_TREE, expr)));
4832 else
4833 gcc_unreachable ();
4834
4835 case COMPLEX_TYPE:
4836 return fold (convert_to_complex (type, expr));
4837
4838 default:
4839 gcc_unreachable ();
4840 }
4841 }
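
/* As an illustration of the biased-representation path above (a hedged
   example, not taken from a testcase): for

     type Biased is range 100 .. 227;
     for Biased'Size use 7;

   converting the integer value 150 to Biased subtracts the bias, i.e.
   TYPE_MIN_VALUE = 100, so the 7 stored bits contain 50; the symmetric
   input-biased path elsewhere in this function adds the bias back.  */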
4842
4843 /* Create an expression whose value is that of EXPR converted to the common
4844 index type, which is sizetype. EXPR is supposed to be in the base type
4845 of the GNAT index type. Calling it is equivalent to doing
4846
4847 convert (sizetype, expr)
4848
4849 but we try to distribute the type conversion with the knowledge that EXPR
4850 cannot overflow in its type. This is a best-effort approach and we fall
4851 back to the above expression as soon as difficulties are encountered.
4852
4853 This is necessary to overcome issues that arise when the GNAT base index
4854 type and the GCC common index type (sizetype) don't have the same size,
4855 which is quite frequent on 64-bit architectures. In this case, and if
4856 the GNAT base index type is signed but the iteration type of the loop has
4857 been forced to unsigned, the loop scalar evolution engine cannot compute
4858 a simple evolution for the general induction variables associated with the
4859 array indices, because it will preserve the wrap-around semantics in the
4860 unsigned type of their "inner" part. As a result, many loop optimizations
4861 are blocked.
4862
4863 The solution is to use a special (basic) induction variable that is at
4864 least as large as sizetype, and to express the aforementioned general
4865 induction variables in terms of this induction variable, eliminating
4866 the problematic intermediate truncation to the GNAT base index type.
4867 This is possible as long as the original expression doesn't overflow
4868 and if the middle-end hasn't introduced artificial overflows in the
4869 course of the various simplifications it can make to the expression. */
4870
4871 tree
4872 convert_to_index_type (tree expr)
4873 {
4874 enum tree_code code = TREE_CODE (expr);
4875 tree type = TREE_TYPE (expr);
4876
4877 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4878 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4879 if (TYPE_UNSIGNED (type) || !optimize)
4880 return convert (sizetype, expr);
4881
4882 switch (code)
4883 {
4884 case VAR_DECL:
4885 /* The main effect of the function: replace a loop parameter with its
4886 associated special induction variable. */
4887 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4888 expr = DECL_INDUCTION_VAR (expr);
4889 break;
4890
4891 CASE_CONVERT:
4892 {
4893 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4894 /* Bail out as soon as we suspect some sort of type frobbing. */
4895 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4896 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4897 break;
4898 }
4899
4900 /* ... fall through ... */
4901
4902 case NON_LVALUE_EXPR:
4903 return fold_build1 (code, sizetype,
4904 convert_to_index_type (TREE_OPERAND (expr, 0)));
4905
4906 case PLUS_EXPR:
4907 case MINUS_EXPR:
4908 case MULT_EXPR:
4909 return fold_build2 (code, sizetype,
4910 convert_to_index_type (TREE_OPERAND (expr, 0)),
4911 convert_to_index_type (TREE_OPERAND (expr, 1)));
4912
4913 case COMPOUND_EXPR:
4914 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4915 convert_to_index_type (TREE_OPERAND (expr, 1)));
4916
4917 case COND_EXPR:
4918 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4919 convert_to_index_type (TREE_OPERAND (expr, 1)),
4920 convert_to_index_type (TREE_OPERAND (expr, 2)));
4921
4922 default:
4923 break;
4924 }
4925
4926 return convert (sizetype, expr);
4927 }
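
/* As an illustrative sketch (assumed types, not from a testcase): on a
   64-bit target where the GNAT base index type is 32-bit signed, given
   a loop parameter J with an associated 64-bit induction variable IV,
   the reference A (J + 1) requires converting J + 1 to sizetype.  A
   plain conversion would produce

     (sizetype) (J + 1)

   whereas the distribution performed above produces

     (sizetype) IV + (sizetype) 1

   which the scalar evolution engine can analyze as an affine function
   of IV.  */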
4928 \f
4929 /* Remove all conversions that are done in EXP. This includes converting
4930 from a padded type or to a justified modular type. If TRUE_ADDRESS
4931 is true, always return the address of the containing object even if
4932 the address is not bit-aligned. */
4933
4934 tree
4935 remove_conversions (tree exp, bool true_address)
4936 {
4937 switch (TREE_CODE (exp))
4938 {
4939 case CONSTRUCTOR:
4940 if (true_address
4941 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4942 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4943 return
4944 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4945 break;
4946
4947 case COMPONENT_REF:
4948 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4949 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4950 break;
4951
4952 CASE_CONVERT:
4953 case VIEW_CONVERT_EXPR:
4954 case NON_LVALUE_EXPR:
4955 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4956
4957 default:
4958 break;
4959 }
4960
4961 return exp;
4962 }
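
/* E.g. (a hedged illustration): given a tree of the form

     NOP_EXPR <VIEW_CONVERT_EXPR <COMPONENT_REF <Obj.F>>>

   where Obj is of a padded record type and F its single field, the
   conversions are peeled one by one and Obj itself is returned, so the
   caller sees the containing object rather than any of its views.  */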
4963 \f
4964 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4965 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4966 likewise return an expression pointing to the underlying array. */
4967
4968 tree
4969 maybe_unconstrained_array (tree exp)
4970 {
4971 enum tree_code code = TREE_CODE (exp);
4972 tree type = TREE_TYPE (exp);
4973
4974 switch (TREE_CODE (type))
4975 {
4976 case UNCONSTRAINED_ARRAY_TYPE:
4977 if (code == UNCONSTRAINED_ARRAY_REF)
4978 {
4979 const bool read_only = TREE_READONLY (exp);
4980 const bool no_trap = TREE_THIS_NOTRAP (exp);
4981
4982 exp = TREE_OPERAND (exp, 0);
4983 type = TREE_TYPE (exp);
4984
4985 if (TREE_CODE (exp) == COND_EXPR)
4986 {
4987 tree op1
4988 = build_unary_op (INDIRECT_REF, NULL_TREE,
4989 build_component_ref (TREE_OPERAND (exp, 1),
4990 TYPE_FIELDS (type),
4991 false));
4992 tree op2
4993 = build_unary_op (INDIRECT_REF, NULL_TREE,
4994 build_component_ref (TREE_OPERAND (exp, 2),
4995 TYPE_FIELDS (type),
4996 false));
4997
4998 exp = build3 (COND_EXPR,
4999 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
5000 TREE_OPERAND (exp, 0), op1, op2);
5001 }
5002 else
5003 {
5004 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
5005 build_component_ref (exp,
5006 TYPE_FIELDS (type),
5007 false));
5008 TREE_READONLY (exp) = read_only;
5009 TREE_THIS_NOTRAP (exp) = no_trap;
5010 }
5011 }
5012
5013 else if (code == NULL_EXPR)
5014 exp = build1 (NULL_EXPR,
5015 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
5016 TREE_OPERAND (exp, 0));
5017 break;
5018
5019 case RECORD_TYPE:
5020 /* If this is a padded type and it contains a template, convert to the
5021 unpadded type first. */
5022 if (TYPE_PADDING_P (type)
5023 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
5024 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
5025 {
5026 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5027 code = TREE_CODE (exp);
5028 type = TREE_TYPE (exp);
5029 }
5030
5031 if (TYPE_CONTAINS_TEMPLATE_P (type))
5032 {
5033 /* If the array initializer is a box, return NULL_TREE. */
5034 if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
5035 return NULL_TREE;
5036
5037 exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
5038 false);
5039 type = TREE_TYPE (exp);
5040
5041 /* If the array type is padded, convert to the unpadded type. */
5042 if (TYPE_IS_PADDING_P (type))
5043 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
5044 }
5045 break;
5046
5047 default:
5048 break;
5049 }
5050
5051 return exp;
5052 }
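
/* For instance (illustrative): if EXP is the UNCONSTRAINED_ARRAY_REF
   P.all, where P is a fat pointer whose first field points to the
   actual array, the function returns an INDIRECT_REF of that first
   field, thereby exposing an object of the underlying ARRAY_TYPE to
   the caller.  */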
5053 \f
5054 /* Return true if EXPR is an expression that can be folded as an operand
5055 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5056
5057 static bool
5058 can_fold_for_view_convert_p (tree expr)
5059 {
5060 tree t1, t2;
5061
5062 /* The folder will fold NOP_EXPRs between integral types with the same
5063 precision (in the middle-end's sense). We cannot allow it if the
5064 types don't have the same precision in the Ada sense as well. */
5065 if (TREE_CODE (expr) != NOP_EXPR)
5066 return true;
5067
5068 t1 = TREE_TYPE (expr);
5069 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5070
5071 /* Defer to the folder for non-integral conversions. */
5072 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5073 return true;
5074
5075 /* Only fold conversions that preserve both precisions. */
5076 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5077 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5078 return true;
5079
5080 return false;
5081 }
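
/* As an assumed example: two 32-bit integer types may both have
   TYPE_PRECISION 32 in the middle-end while their Ada RM sizes are 24
   and 32 respectively.  The folder would collapse a NOP_EXPR between
   them as a no-op, but under a VIEW_CONVERT_EXPR this would lose the
   distinction between the 24 value bits and the 8 extra bits, hence
   the rm_size check above.  */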
5082
5083 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5084 If NOTRUNC_P is true, truncation operations should be suppressed.
5085
5086 Special care is required with (source or target) integral types whose
5087 precision is not equal to their size, to make sure we fetch or assign
5088 the value bits whose location might depend on the endianness, e.g.
5089
5090 Rmsize : constant := 8;
5091 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5092
5093 type Bit_Array is array (1 .. Rmsize) of Boolean;
5094 pragma Pack (Bit_Array);
5095
5096 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5097
5098 Value : Int := 2#1000_0001#;
5099 Vbits : Bit_Array := To_Bit_Array (Value);
5100
5101 we expect the 8 bits at Vbits'Address to always contain Value, while
5102 their original location depends on the endianness, at Value'Address
5103 on a little-endian architecture but not on a big-endian one.
5104
5105 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5106 the bits between the precision and the size are filled, because of the
5107 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5108 So we use the special predicate type_unsigned_for_rm above. */
5109
5110 tree
5111 unchecked_convert (tree type, tree expr, bool notrunc_p)
5112 {
5113 tree etype = TREE_TYPE (expr);
5114 enum tree_code ecode = TREE_CODE (etype);
5115 enum tree_code code = TREE_CODE (type);
5116 tree tem;
5117 int c;
5118
5119 /* If the expression is already of the right type, we are done. */
5120 if (etype == type)
5121 return expr;
5122
5123 /* If both types are integral, just do a normal conversion.
5124 Likewise for a conversion to an unconstrained array. */
5125 if (((INTEGRAL_TYPE_P (type)
5126 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5127 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5128 && (INTEGRAL_TYPE_P (etype)
5129 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5130 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5131 || code == UNCONSTRAINED_ARRAY_TYPE)
5132 {
5133 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
5134 {
5135 tree ntype = copy_type (etype);
5136 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5137 TYPE_MAIN_VARIANT (ntype) = ntype;
5138 expr = build1 (NOP_EXPR, ntype, expr);
5139 }
5140
5141 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5142 {
5143 tree rtype = copy_type (type);
5144 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5145 TYPE_MAIN_VARIANT (rtype) = rtype;
5146 expr = convert (rtype, expr);
5147 expr = build1 (NOP_EXPR, type, expr);
5148 }
5149 else
5150 expr = convert (type, expr);
5151 }
5152
5153 /* If we are converting to an integral type whose precision is not equal
5154 to its size, first unchecked convert to a record type that contains a
5155 field of the given precision. Then extract the result from the field.
5156
5157 There is a subtlety if the source type is an aggregate type with reverse
5158 storage order because its representation is not contiguous in the native
5159 storage order, i.e. a direct unchecked conversion to an integral type
5160 with N bits of precision cannot read the first N bits of the aggregate
5161 type. To overcome it, we do an unchecked conversion to an integral type
5162 with reverse storage order and return the resulting value. This also
5163 ensures that the result of the unchecked conversion doesn't depend on
5164 the endianness of the target machine, but only on the storage order of
5165 the aggregate type.
5166
5167 Finally, for the sake of consistency, we do the unchecked conversion
5168 to an integral type with reverse storage order as soon as the source
5169 type is an aggregate type with reverse storage order, even if there
5170 are no considerations of precision or size involved. */
5171 else if (INTEGRAL_TYPE_P (type)
5172 && TYPE_RM_SIZE (type)
5173 && (tree_int_cst_compare (TYPE_RM_SIZE (type),
5174 TYPE_SIZE (type)) < 0
5175 || (AGGREGATE_TYPE_P (etype)
5176 && TYPE_REVERSE_STORAGE_ORDER (etype))))
5177 {
5178 tree rec_type = make_node (RECORD_TYPE);
5179 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5180 tree field_type, field;
5181
5182 if (AGGREGATE_TYPE_P (etype))
5183 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5184 = TYPE_REVERSE_STORAGE_ORDER (etype);
5185
5186 if (type_unsigned_for_rm (type))
5187 field_type = make_unsigned_type (prec);
5188 else
5189 field_type = make_signed_type (prec);
5190 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5191
5192 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5193 NULL_TREE, bitsize_zero_node, 1, 0);
5194
5195 finish_record_type (rec_type, field, 1, false);
5196
5197 expr = unchecked_convert (rec_type, expr, notrunc_p);
5198 expr = build_component_ref (expr, field, false);
5199 expr = fold_build1 (NOP_EXPR, type, expr);
5200 }
5201
5202 /* Similarly if we are converting from an integral type whose precision is
5203 not equal to its size, first copy into a field of the given precision
5204 and unchecked convert the record type.
5205
5206 The same considerations as above apply if the target type is an aggregate
5207 type with reverse storage order and we also proceed similarly. */
5208 else if (INTEGRAL_TYPE_P (etype)
5209 && TYPE_RM_SIZE (etype)
5210 && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
5211 TYPE_SIZE (etype)) < 0
5212 || (AGGREGATE_TYPE_P (type)
5213 && TYPE_REVERSE_STORAGE_ORDER (type))))
5214 {
5215 tree rec_type = make_node (RECORD_TYPE);
5216 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5217 vec<constructor_elt, va_gc> *v;
5218 vec_alloc (v, 1);
5219 tree field_type, field;
5220
5221 if (AGGREGATE_TYPE_P (type))
5222 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5223 = TYPE_REVERSE_STORAGE_ORDER (type);
5224
5225 if (type_unsigned_for_rm (etype))
5226 field_type = make_unsigned_type (prec);
5227 else
5228 field_type = make_signed_type (prec);
5229 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5230
5231 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5232 NULL_TREE, bitsize_zero_node, 1, 0);
5233
5234 finish_record_type (rec_type, field, 1, false);
5235
5236 expr = fold_build1 (NOP_EXPR, field_type, expr);
5237 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5238 expr = gnat_build_constructor (rec_type, v);
5239 expr = unchecked_convert (type, expr, notrunc_p);
5240 }
5241
5242 /* If we are converting from a scalar type to a type with a different size,
5243 we need to pad to have the same size on both sides.
5244
5245 ??? We cannot do it unconditionally because unchecked conversions are
5246 used liberally by the front-end to implement polymorphism, e.g. in:
5247
5248 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5249 return p___size__4 (p__object!(S191s.all));
5250
5251 so we skip all expressions that are references. */
5252 else if (!REFERENCE_CLASS_P (expr)
5253 && !AGGREGATE_TYPE_P (etype)
5254 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5255 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5256 {
5257 if (c < 0)
5258 {
5259 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5260 false, false, false, true),
5261 expr);
5262 expr = unchecked_convert (type, expr, notrunc_p);
5263 }
5264 else
5265 {
5266 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5267 false, false, false, true);
5268 expr = unchecked_convert (rec_type, expr, notrunc_p);
5269 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5270 }
5271 }
5272
5273 /* We have a special case when we are converting between two unconstrained
5274 array types. In that case, take the address, convert the fat pointer
5275 types, and dereference. */
5276 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5277 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5278 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5279 build_unary_op (ADDR_EXPR, NULL_TREE,
5280 expr)));
5281
5282 /* Another special case is when we are converting to a vector type from its
5283 representative array type; this is a regular conversion. */
5284 else if (code == VECTOR_TYPE
5285 && ecode == ARRAY_TYPE
5286 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5287 etype))
5288 expr = convert (type, expr);
5289
5290 /* And, if the array type is not the representative, we try to build an
5291 intermediate vector type of which the array type is the representative
5292 and to do the unchecked conversion between the vector types, in order
5293 to enable further simplifications in the middle-end. */
5294 else if (code == VECTOR_TYPE
5295 && ecode == ARRAY_TYPE
5296 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5297 {
5298 expr = convert (tem, expr);
5299 return unchecked_convert (type, expr, notrunc_p);
5300 }
5301
5302 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5303 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5304 else if (TREE_CODE (expr) == CONSTRUCTOR
5305 && code == RECORD_TYPE
5306 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5307 {
5308 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5309 Empty, false, false, false, true),
5310 expr);
5311 return unchecked_convert (type, expr, notrunc_p);
5312 }
5313
5314 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5315 else
5316 {
5317 expr = maybe_unconstrained_array (expr);
5318 etype = TREE_TYPE (expr);
5319 ecode = TREE_CODE (etype);
5320 if (can_fold_for_view_convert_p (expr))
5321 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5322 else
5323 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5324 }
5325
5326 /* If the result is a non-biased integral type whose precision is not equal
5327 to its size, sign- or zero-extend the result. But we need not do this
5328 if the input is also an integral type and both are unsigned or both are
5329 signed and have the same precision. */
5330 if (!notrunc_p
5331 && INTEGRAL_TYPE_P (type)
5332 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5333 && TYPE_RM_SIZE (type)
5334 && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
5335 && !(INTEGRAL_TYPE_P (etype)
5336 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5337 && (type_unsigned_for_rm (type)
5338 || tree_int_cst_compare (TYPE_RM_SIZE (type),
5339 TYPE_RM_SIZE (etype)
5340 ? TYPE_RM_SIZE (etype)
5341 : TYPE_SIZE (etype)) == 0)))
5342 {
5343 if (integer_zerop (TYPE_RM_SIZE (type)))
5344 expr = build_int_cst (type, 0);
5345 else
5346 {
5347 tree base_type
5348 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5349 type_unsigned_for_rm (type));
5350 tree shift_expr
5351 = convert (base_type,
5352 size_binop (MINUS_EXPR,
5353 TYPE_SIZE (type), TYPE_RM_SIZE (type)));
5354 expr
5355 = convert (type,
5356 build_binary_op (RSHIFT_EXPR, base_type,
5357 build_binary_op (LSHIFT_EXPR, base_type,
5358 convert (base_type,
5359 expr),
5360 shift_expr),
5361 shift_expr));
5362 }
5363 }
5364
5365 /* An unchecked conversion should never raise Constraint_Error. The code
5366 below assumes that GCC's conversion routines overflow the same way that
5367 the underlying hardware does. This is probably true. In the rare case
5368 when it is false, we can rely on the fact that such conversions are
5369 erroneous anyway. */
5370 if (TREE_CODE (expr) == INTEGER_CST)
5371 TREE_OVERFLOW (expr) = 0;
5372
5373 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
5374 mark the expression as no longer constant. */
5375 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5376 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5377 OEP_ONLY_CONST))
5378 TREE_CONSTANT (expr) = 0;
5379
5380 return expr;
5381 }
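
/* To make the record trick above concrete (a hedged sketch): for the
   To_Bit_Array instance in the leading comment, the source type Int has
   RM size 8 but size 32, so Value is first copied into the single 8-bit
   field OBJ of a temporary record type, laid out at bit offset 0, and
   that record is then unchecked-converted to Bit_Array.  The 8 value
   bits therefore land at Vbits'Address on both endiannesses.  */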
5382 \f
5383 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5384 the latter being a record type as predicated by Is_Record_Type. */
5385
5386 enum tree_code
5387 tree_code_for_record_type (Entity_Id gnat_type)
5388 {
5389 Node_Id component_list, component;
5390
5391 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5392 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5393 if (!Is_Unchecked_Union (gnat_type))
5394 return RECORD_TYPE;
5395
5396 gnat_type = Implementation_Base_Type (gnat_type);
5397 component_list
5398 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5399
5400 for (component = First_Non_Pragma (Component_Items (component_list));
5401 Present (component);
5402 component = Next_Non_Pragma (component))
5403 if (Ekind (Defining_Entity (component)) == E_Component)
5404 return RECORD_TYPE;
5405
5406 return UNION_TYPE;
5407 }
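
/* An illustrative Ada fragment (not from a testcase):

     type R (D : Boolean := False) is record
        case D is
           when False => X : Integer;
           when True  => Y : Float;
        end case;
     end record;
     pragma Unchecked_Union (R);

   has no non-discriminant component outside the variant part, so it is
   translated as a UNION_TYPE; declaring a component before the case
   part would make it a RECORD_TYPE again.  */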
5408
5409 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5410 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5411 according to the presence of an alignment clause on the type or, if it
5412 is an array, on the component type. */
5413
5414 bool
5415 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5416 {
5417 gnat_type = Underlying_Type (gnat_type);
5418
5419 *align_clause = Present (Alignment_Clause (gnat_type));
5420
5421 if (Is_Array_Type (gnat_type))
5422 {
5423 gnat_type = Underlying_Type (Component_Type (gnat_type));
5424 if (Present (Alignment_Clause (gnat_type)))
5425 *align_clause = true;
5426 }
5427
5428 if (!Is_Floating_Point_Type (gnat_type))
5429 return false;
5430
5431 if (UI_To_Int (Esize (gnat_type)) != 64)
5432 return false;
5433
5434 return true;
5435 }
5436
5437 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5438 size is greater or equal to 64 bits, or an array of such a type. Set
5439 ALIGN_CLAUSE according to the presence of an alignment clause on the
5440 type or, if it is an array, on the component type. */
5441
5442 bool
5443 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5444 {
5445 gnat_type = Underlying_Type (gnat_type);
5446
5447 *align_clause = Present (Alignment_Clause (gnat_type));
5448
5449 if (Is_Array_Type (gnat_type))
5450 {
5451 gnat_type = Underlying_Type (Component_Type (gnat_type));
5452 if (Present (Alignment_Clause (gnat_type)))
5453 *align_clause = true;
5454 }
5455
5456 if (!Is_Scalar_Type (gnat_type))
5457 return false;
5458
5459 if (UI_To_Int (Esize (gnat_type)) < 64)
5460 return false;
5461
5462 return true;
5463 }
5464
5465 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5466 component of an aggregate type. */
5467
5468 bool
5469 type_for_nonaliased_component_p (tree gnu_type)
5470 {
5471 /* If the type is passed by reference, we may have pointers to the
5472 component so it cannot be made non-aliased. */
5473 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5474 return false;
5475
5476 /* We used to say that any component of aggregate type is aliased
5477 because the front-end may take 'Reference of it. The front-end
5478 has been enhanced in the meantime so as to use a renaming instead
5479 in most cases, but the back-end can probably take the address of
5480 such a component too so we go for the conservative stance.
5481
5482 For instance, we might need the address of any array type, even
5483 if normally passed by copy, to construct a fat pointer if the
5484 component is used as an actual for an unconstrained formal.
5485
5486 Likewise for record types: even if a specific record subtype is
5487 passed by copy, the parent type might be passed by ref (e.g. if
5488 it's of variable size) and we might take the address of a child
5489 component to pass to a parent formal. We have no way to check
5490 for such conditions here. */
5491 if (AGGREGATE_TYPE_P (gnu_type))
5492 return false;
5493
5494 return true;
5495 }
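
/* A hedged example of the array case: given

     subtype Line is String (1 .. 80);
     type Page is array (1 .. 24) of Line;
     procedure P (S : in out String);

   passing Page_Object (I) to P requires building a fat pointer to the
   Line component, hence taking its address, which is why components of
   aggregate types are conservatively treated as aliased.  */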
5496
5497 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5498
5499 bool
5500 smaller_form_type_p (tree type, tree orig_type)
5501 {
5502 tree size, osize;
5503
5504 /* We're not interested in variants here. */
5505 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5506 return false;
5507
5508 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5509 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5510 return false;
5511
5512 size = TYPE_SIZE (type);
5513 osize = TYPE_SIZE (orig_type);
5514
5515 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5516 return false;
5517
5518 return tree_int_cst_lt (size, osize) != 0;
5519 }
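
/* E.g. (assumed scenario): when a record type is used as the component
   of a packed array, gigi may build a "packable" version of it whose
   TYPE_SIZE is 40 bits while the original type's is 64 bits; both share
   the same TYPE_NAME, so this predicate returns true for the pair
   (packable version, original type).  */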
5520
5521 /* Return whether EXPR, which is the renamed object in an object renaming
5522 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5523 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5524
5525 bool
5526 can_materialize_object_renaming_p (Node_Id expr)
5527 {
5528 while (true)
5529 {
5530 expr = Original_Node (expr);
5531
5532 switch (Nkind (expr))
5533 {
5534 case N_Identifier:
5535 case N_Expanded_Name:
5536 if (!Present (Renamed_Object (Entity (expr))))
5537 return true;
5538 expr = Renamed_Object (Entity (expr));
5539 break;
5540
5541 case N_Selected_Component:
5542 {
5543 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5544 return false;
5545
5546 const Uint bitpos
5547 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5548 if (!UI_Is_In_Int_Range (bitpos)
5549 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5550 return false;
5551
5552 expr = Prefix (expr);
5553 break;
5554 }
5555
5556 case N_Indexed_Component:
5557 case N_Slice:
5558 {
5559 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5560
5561 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5562 return false;
5563
5564 expr = Prefix (expr);
5565 break;
5566 }
5567
5568 case N_Explicit_Dereference:
5569 expr = Prefix (expr);
5570 break;
5571
5572 default:
5573 return true;
5574 }
5575 }
5576 }
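
/* For example (illustrative only): in

     type Bits is array (1 .. 10) of Boolean;
     pragma Pack (Bits);
     B : Bits;
     E : Boolean renames B (3);

   the renamed object is an indexed component of a packed array, which
   need not even be byte-aligned, so it cannot be materialized as a
   reference and the function returns false.  */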
5577
5578 /* Perform final processing on global declarations. */
5579
5580 static GTY (()) tree dummy_global;
5581
5582 void
5583 gnat_write_global_declarations (void)
5584 {
5585 unsigned int i;
5586 tree iter;
5587
5588 /* If we have declared types as used at the global level, insert them in
5589 the global hash table. We use a dummy variable for this purpose, but
5590 we need to build it unconditionally to avoid -fcompare-debug issues. */
5591 if (first_global_object_name)
5592 {
5593 struct varpool_node *node;
5594 char *label;
5595
5596 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5597 dummy_global
5598 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5599 void_type_node);
5600 DECL_HARD_REGISTER (dummy_global) = 1;
5601 TREE_STATIC (dummy_global) = 1;
5602 node = varpool_node::get_create (dummy_global);
5603 node->definition = 1;
5604 node->force_output = 1;
5605
5606 if (types_used_by_cur_var_decl)
5607 while (!types_used_by_cur_var_decl->is_empty ())
5608 {
5609 tree t = types_used_by_cur_var_decl->pop ();
5610 types_used_by_var_decl_insert (t, dummy_global);
5611 }
5612 }
5613
5614 /* Output debug information for all global type declarations first. This
5615 ensures that global types whose compilation hasn't been finalized yet,
5616 for example pointers to Taft amendment types, have their compilation
5617 finalized in the right context. */
5618 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5619 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5620 debug_hooks->type_decl (iter, false);
5621
5622 /* Output imported functions. */
5623 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5624 if (TREE_CODE (iter) == FUNCTION_DECL
5625 && DECL_EXTERNAL (iter)
5626 && DECL_INITIAL (iter) == NULL
5627 && !DECL_IGNORED_P (iter)
5628 && DECL_FUNCTION_IS_DEF (iter))
5629 debug_hooks->early_global_decl (iter);
5630
5631 /* Then output the global variables. We need to do that after the debug
5632 information for global types is emitted so that they are finalized. Skip
5633 external global variables, unless we need to emit debug info for them:
5634 this is useful for imported variables, for instance. */
5635 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5636 if (TREE_CODE (iter) == VAR_DECL
5637 && (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
5638 rest_of_decl_compilation (iter, true, 0);
5639
5640 /* Output the imported modules/declarations. In GNAT, these only
5641 materialize subprograms. */
5642 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5643 if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
5644 debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
5645 DECL_CONTEXT (iter), false, false);
5646 }
5647
5648 /* ************************************************************************
5649 * * GCC builtins support *
5650 * ************************************************************************ */
5651
5652 /* The general scheme is fairly simple:
5653
5654 For each builtin function/type to be declared, gnat_install_builtins calls
5655 internal facilities which eventually get to gnat_pushdecl, which in turn
5656 records the builtin function decls so declared in the 'builtin_decls'
5657 global data structure. When an Intrinsic subprogram declaration is
5658 processed, we search this data structure to retrieve the associated
5659 BUILT_IN DECL node. */
5660
5661 /* Search the chain of currently available builtin declarations for a node
5662 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5663 found, if any, or NULL_TREE otherwise. */
5664 tree
5665 builtin_decl_for (tree name)
5666 {
5667 unsigned i;
5668 tree decl;
5669
5670 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5671 if (DECL_NAME (decl) == name)
5672 return decl;
5673
5674 return NULL_TREE;
5675 }
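
/* Typical (assumed) usage, when an Intrinsic subprogram is bound to a
   GCC builtin through its Interface_Name:

     tree decl = builtin_decl_for (get_identifier ("__builtin_sin"));

   DECL is then a BUILT_IN FUNCTION_DECL usable as the subprogram's
   translation, or NULL_TREE if no builtin with that name was pushed.  */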
5676
5677 /* The code below eventually exposes gnat_install_builtins, which declares
5678 the builtin types and functions we might need, either internally or as
5679 user accessible facilities.
5680
5681 ??? This is a first implementation shot, still in rough shape. It is
5682 heavily inspired from the "C" family implementation, with chunks copied
5683 verbatim from there.
5684
5685 Two obvious improvement candidates are:
5686 o Use a more efficient name/decl mapping scheme
5687 o Devise a middle-end infrastructure to avoid having to copy
5688 pieces between front-ends. */
5689
5690 /* ----------------------------------------------------------------------- *
5691 * BUILTIN ELEMENTARY TYPES *
5692 * ----------------------------------------------------------------------- */
5693
5694 /* Standard data types to be used in builtin argument declarations. */
5695
5696 enum c_tree_index
5697 {
5698 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5699 CTI_STRING_TYPE,
5700 CTI_CONST_STRING_TYPE,
5701
5702 CTI_MAX
5703 };
5704
5705 static tree c_global_trees[CTI_MAX];
5706
5707 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5708 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5709 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5710
5711 /* ??? In addition to lacking some attribute handlers, we currently don't
5712 support a (small) number of builtin types, which in turn inhibits support
5713 for a number of builtin functions. */
5714 #define wint_type_node void_type_node
5715 #define intmax_type_node void_type_node
5716 #define uintmax_type_node void_type_node
5717
5718 /* Used to help initialize the builtin-types.def table. When a type of
5719 the correct size doesn't exist, use error_mark_node instead of NULL.
5720 The latter results in segfaults even when a decl using the type doesn't
5721 get invoked. */
5722
5723 static tree
5724 builtin_type_for_size (int size, bool unsignedp)
5725 {
5726 tree type = gnat_type_for_size (size, unsignedp);
5727 return type ? type : error_mark_node;
5728 }
5729
5730 /* Build/push the elementary type decls that builtin functions/types
5731 will need. */
5732
5733 static void
5734 install_builtin_elementary_types (void)
5735 {
5736 signed_size_type_node = gnat_signed_type_for (size_type_node);
5737 pid_type_node = integer_type_node;
5738
5739 string_type_node = build_pointer_type (char_type_node);
5740 const_string_type_node
5741 = build_pointer_type (build_qualified_type
5742 (char_type_node, TYPE_QUAL_CONST));
5743 }
5744
5745 /* ----------------------------------------------------------------------- *
5746 * BUILTIN FUNCTION TYPES *
5747 * ----------------------------------------------------------------------- */
5748
5749 /* Now, builtin function types per se. */
5750
5751 enum c_builtin_type
5752 {
5753 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5754 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5755 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5756 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5757 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5758 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5759 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5760 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5761 ARG6) NAME,
5762 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5763 ARG6, ARG7) NAME,
5764 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5765 ARG6, ARG7, ARG8) NAME,
5766 #define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5767 ARG6, ARG7, ARG8, ARG9) NAME,
5768 #define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5769 ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
5770 #define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5771 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
5772 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5773 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5774 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5775 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5776 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5777 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5778 NAME,
5779 #define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5780 ARG6) NAME,
5781 #define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5782 ARG6, ARG7) NAME,
5783 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5784 #include "builtin-types.def"
5785 #undef DEF_PRIMITIVE_TYPE
5786 #undef DEF_FUNCTION_TYPE_0
5787 #undef DEF_FUNCTION_TYPE_1
5788 #undef DEF_FUNCTION_TYPE_2
5789 #undef DEF_FUNCTION_TYPE_3
5790 #undef DEF_FUNCTION_TYPE_4
5791 #undef DEF_FUNCTION_TYPE_5
5792 #undef DEF_FUNCTION_TYPE_6
5793 #undef DEF_FUNCTION_TYPE_7
5794 #undef DEF_FUNCTION_TYPE_8
5795 #undef DEF_FUNCTION_TYPE_9
5796 #undef DEF_FUNCTION_TYPE_10
5797 #undef DEF_FUNCTION_TYPE_11
5798 #undef DEF_FUNCTION_TYPE_VAR_0
5799 #undef DEF_FUNCTION_TYPE_VAR_1
5800 #undef DEF_FUNCTION_TYPE_VAR_2
5801 #undef DEF_FUNCTION_TYPE_VAR_3
5802 #undef DEF_FUNCTION_TYPE_VAR_4
5803 #undef DEF_FUNCTION_TYPE_VAR_5
5804 #undef DEF_FUNCTION_TYPE_VAR_6
5805 #undef DEF_FUNCTION_TYPE_VAR_7
5806 #undef DEF_POINTER_TYPE
5807 BT_LAST
5808 };
5809
5810 typedef enum c_builtin_type builtin_type;
5811
5812 /* A temporary array used in communication with def_fn_type. */
5813 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5814
5815 /* A helper function for install_builtin_types. Build function type
5816 for DEF with return type RET and N arguments. If VAR is true, then the
5817 function should be variadic after those N arguments.
5818
5819 Takes special care not to ICE if any of the types involved are
5820 error_mark_node, which indicates that said type is not in fact available
5821 (see builtin_type_for_size). In which case the function type as a whole
5822 should be error_mark_node. */
5823
5824 static void
5825 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5826 {
5827 tree t;
5828 tree *args = XALLOCAVEC (tree, n);
5829 va_list list;
5830 int i;
5831
5832 va_start (list, n);
5833 for (i = 0; i < n; ++i)
5834 {
5835 builtin_type a = (builtin_type) va_arg (list, int);
5836 t = builtin_types[a];
5837 if (t == error_mark_node)
5838 goto egress;
5839 args[i] = t;
5840 }
5841
5842 t = builtin_types[ret];
5843 if (t == error_mark_node)
5844 goto egress;
5845 if (var)
5846 t = build_varargs_function_type_array (t, n, args);
5847 else
5848 t = build_function_type_array (t, n, args);
5849
5850 egress:
5851 builtin_types[def] = t;
5852 va_end (list);
5853 }
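
/* For instance, a builtin-types.def entry such as the (hypothetical)

     DEF_FUNCTION_TYPE_2 (BT_FN_INT_INT_INT, BT_INT, BT_INT, BT_INT)

   is expanded by the macros below into

     def_fn_type (BT_FN_INT_INT_INT, BT_INT, 0, 2, BT_INT, BT_INT);

   which stores the function type "int (int, int)" into
   builtin_types[BT_FN_INT_INT_INT].  */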
5854
5855 /* Build the builtin function types and install them in the builtin_types
5856 array for later use in builtin function decls. */
5857
5858 static void
5859 install_builtin_function_types (void)
5860 {
5861 tree va_list_ref_type_node;
5862 tree va_list_arg_type_node;
5863
5864 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5865 {
5866 va_list_arg_type_node = va_list_ref_type_node =
5867 build_pointer_type (TREE_TYPE (va_list_type_node));
5868 }
5869 else
5870 {
5871 va_list_arg_type_node = va_list_type_node;
5872 va_list_ref_type_node = build_reference_type (va_list_type_node);
5873 }
5874
5875 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5876 builtin_types[ENUM] = VALUE;
5877 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5878 def_fn_type (ENUM, RETURN, 0, 0);
5879 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5880 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5881 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5882 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5883 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5884 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5885 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5886 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5887 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5888 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5889 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5890 ARG6) \
5891 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5892 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5893 ARG6, ARG7) \
5894 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5895 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5896 ARG6, ARG7, ARG8) \
5897 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5898 ARG7, ARG8);
5899 #define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5900 ARG6, ARG7, ARG8, ARG9) \
5901 def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5902 ARG7, ARG8, ARG9);
5903 #define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5904 ARG6, ARG7, ARG8, ARG9, ARG10) \
5905 def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5906 ARG7, ARG8, ARG9, ARG10);
5907 #define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
5908 ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
5909 def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5910 ARG7, ARG8, ARG9, ARG10, ARG11);
5911 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5912 def_fn_type (ENUM, RETURN, 1, 0);
5913 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5914 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5915 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5916 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5917 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5918 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5919 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5920 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5921 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5922 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5923 #define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5924 ARG6) \
5925 def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5926 #define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5927 ARG6, ARG7) \
5928 def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5929 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5930 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5931
5932 #include "builtin-types.def"
5933
5934 #undef DEF_PRIMITIVE_TYPE
5935 #undef DEF_FUNCTION_TYPE_0
5936 #undef DEF_FUNCTION_TYPE_1
5937 #undef DEF_FUNCTION_TYPE_2
5938 #undef DEF_FUNCTION_TYPE_3
5939 #undef DEF_FUNCTION_TYPE_4
5940 #undef DEF_FUNCTION_TYPE_5
5941 #undef DEF_FUNCTION_TYPE_6
5942 #undef DEF_FUNCTION_TYPE_7
5943 #undef DEF_FUNCTION_TYPE_8
5944 #undef DEF_FUNCTION_TYPE_9
5945 #undef DEF_FUNCTION_TYPE_10
5946 #undef DEF_FUNCTION_TYPE_11
5947 #undef DEF_FUNCTION_TYPE_VAR_0
5948 #undef DEF_FUNCTION_TYPE_VAR_1
5949 #undef DEF_FUNCTION_TYPE_VAR_2
5950 #undef DEF_FUNCTION_TYPE_VAR_3
5951 #undef DEF_FUNCTION_TYPE_VAR_4
5952 #undef DEF_FUNCTION_TYPE_VAR_5
5953 #undef DEF_FUNCTION_TYPE_VAR_6
5954 #undef DEF_FUNCTION_TYPE_VAR_7
5955 #undef DEF_POINTER_TYPE
5956 builtin_types[(int) BT_LAST] = NULL_TREE;
5957 }
5958
5959 /* ----------------------------------------------------------------------- *
5960 * BUILTIN ATTRIBUTES *
5961 * ----------------------------------------------------------------------- */
5962
5963 enum built_in_attribute
5964 {
5965 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5966 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5967 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5968 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5969 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5970 #include "builtin-attrs.def"
5971 #undef DEF_ATTR_NULL_TREE
5972 #undef DEF_ATTR_INT
5973 #undef DEF_ATTR_STRING
5974 #undef DEF_ATTR_IDENT
5975 #undef DEF_ATTR_TREE_LIST
5976 ATTR_LAST
5977 };
5978
5979 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5980
5981 static void
5982 install_builtin_attributes (void)
5983 {
5984 /* Fill in the built_in_attributes array. */
5985 #define DEF_ATTR_NULL_TREE(ENUM) \
5986 built_in_attributes[(int) ENUM] = NULL_TREE;
5987 #define DEF_ATTR_INT(ENUM, VALUE) \
5988 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5989 #define DEF_ATTR_STRING(ENUM, VALUE) \
5990 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5991 #define DEF_ATTR_IDENT(ENUM, STRING) \
5992 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5993 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5994 built_in_attributes[(int) ENUM] \
5995 = tree_cons (built_in_attributes[(int) PURPOSE], \
5996 built_in_attributes[(int) VALUE], \
5997 built_in_attributes[(int) CHAIN]);
5998 #include "builtin-attrs.def"
5999 #undef DEF_ATTR_NULL_TREE
6000 #undef DEF_ATTR_INT
6001 #undef DEF_ATTR_STRING
6002 #undef DEF_ATTR_IDENT
6003 #undef DEF_ATTR_TREE_LIST
6004 }
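
/* As an illustration (hypothetical builtin-attrs.def entries), the
   expansion above turns

     DEF_ATTR_IDENT (ATTR_PURE, "pure")
     DEF_ATTR_TREE_LIST (ATTR_PURE_NOTHROW_LIST, ATTR_PURE,
                         ATTR_NULL, ATTR_NOTHROW_LIST)

   into a TREE_LIST whose purpose is the identifier "pure" and whose
   chain is the nothrow list, directly usable as an attributes list for
   a builtin decl.  */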
6005
6006 /* Handle a "const" attribute; arguments as in
6007 struct attribute_spec.handler. */
6008
6009 static tree
6010 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6011 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6012 bool *no_add_attrs)
6013 {
6014 if (TREE_CODE (*node) == FUNCTION_DECL)
6015 TREE_READONLY (*node) = 1;
6016 else
6017 *no_add_attrs = true;
6018
6019 return NULL_TREE;
6020 }
6021
6022 /* Handle a "nothrow" attribute; arguments as in
6023 struct attribute_spec.handler. */
6024
6025 static tree
6026 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6027 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6028 bool *no_add_attrs)
6029 {
6030 if (TREE_CODE (*node) == FUNCTION_DECL)
6031 TREE_NOTHROW (*node) = 1;
6032 else
6033 *no_add_attrs = true;
6034
6035 return NULL_TREE;
6036 }
6037
6038 /* Handle a "pure" attribute; arguments as in
6039 struct attribute_spec.handler. */
6040
6041 static tree
6042 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6043 int ARG_UNUSED (flags), bool *no_add_attrs)
6044 {
6045 if (TREE_CODE (*node) == FUNCTION_DECL)
6046 DECL_PURE_P (*node) = 1;
6047 /* TODO: support types. */
6048 else
6049 {
6050 warning (OPT_Wattributes, "%qs attribute ignored",
6051 IDENTIFIER_POINTER (name));
6052 *no_add_attrs = true;
6053 }
6054
6055 return NULL_TREE;
6056 }
6057
6058 /* Handle a "no vops" attribute; arguments as in
6059 struct attribute_spec.handler. */
6060
6061 static tree
6062 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
6063 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6064 bool *ARG_UNUSED (no_add_attrs))
6065 {
6066 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
6067 DECL_IS_NOVOPS (*node) = 1;
6068 return NULL_TREE;
6069 }
6070
6071 /* Helper for nonnull attribute handling; fetch the operand number
6072 from the attribute argument list. */
6073
6074 static bool
6075 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6076 {
6077 /* Verify the arg number is a constant. */
6078 if (!tree_fits_uhwi_p (arg_num_expr))
6079 return false;
6080
6081 *valp = TREE_INT_CST_LOW (arg_num_expr);
6082 return true;
6083 }
6084
6085 /* Handle the "nonnull" attribute. */
6086 static tree
6087 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
6088 tree args, int ARG_UNUSED (flags),
6089 bool *no_add_attrs)
6090 {
6091 tree type = *node;
6092 unsigned HOST_WIDE_INT attr_arg_num;
6093
6094 /* If no arguments are specified, all pointer arguments should be
6095 non-null. Verify a full prototype is given so that the arguments
6096 will have the correct types when we actually check them later.
6097 Avoid diagnosing type-generic built-ins since those have no
6098 prototype. */
6099 if (!args)
6100 {
6101 if (!prototype_p (type)
6102 && (!TYPE_ATTRIBUTES (type)
6103 || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
6104 {
6105 error ("nonnull attribute without arguments on a non-prototype");
6106 *no_add_attrs = true;
6107 }
6108 return NULL_TREE;
6109 }
6110
6111 /* Argument list specified. Verify that each argument number references
6112 a pointer argument. */
6113 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
6114 {
6115 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
6116
6117 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
6118 {
6119 error ("nonnull argument has invalid operand number (argument %lu)",
6120 (unsigned long) attr_arg_num);
6121 *no_add_attrs = true;
6122 return NULL_TREE;
6123 }
6124
6125 if (prototype_p (type))
6126 {
6127 function_args_iterator iter;
6128 tree argument;
6129
6130 function_args_iter_init (&iter, type);
6131 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
6132 {
6133 argument = function_args_iter_cond (&iter);
6134 if (!argument || ck_num == arg_num)
6135 break;
6136 }
6137
6138 if (!argument
6139 || TREE_CODE (argument) == VOID_TYPE)
6140 {
6141 error ("nonnull argument with out-of-range operand number "
6142 "(argument %lu, operand %lu)",
6143 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6144 *no_add_attrs = true;
6145 return NULL_TREE;
6146 }
6147
6148 if (TREE_CODE (argument) != POINTER_TYPE)
6149 {
6150 error ("nonnull argument references non-pointer operand "
6151 "(argument %lu, operand %lu)",
6152 (unsigned long) attr_arg_num, (unsigned long) arg_num);
6153 *no_add_attrs = true;
6154 return NULL_TREE;
6155 }
6156 }
6157 }
6158
6159 return NULL_TREE;
6160 }
6161
6162 /* Handle a "sentinel" attribute. */
6163
6164 static tree
6165 handle_sentinel_attribute (tree *node, tree name, tree args,
6166 int ARG_UNUSED (flags), bool *no_add_attrs)
6167 {
6168 if (!prototype_p (*node))
6169 {
6170 warning (OPT_Wattributes,
6171 "%qs attribute requires prototypes with named arguments",
6172 IDENTIFIER_POINTER (name));
6173 *no_add_attrs = true;
6174 }
6175 else
6176 {
6177 if (!stdarg_p (*node))
6178 {
6179 warning (OPT_Wattributes,
6180 "%qs attribute only applies to variadic functions",
6181 IDENTIFIER_POINTER (name));
6182 *no_add_attrs = true;
6183 }
6184 }
6185
6186 if (args)
6187 {
6188 tree position = TREE_VALUE (args);
6189
6190 if (TREE_CODE (position) != INTEGER_CST)
6191 {
6192 warning (0, "requested position is not an integer constant");
6193 *no_add_attrs = true;
6194 }
6195 else
6196 {
6197 if (tree_int_cst_lt (position, integer_zero_node))
6198 {
6199 warning (0, "requested position is less than zero");
6200 *no_add_attrs = true;
6201 }
6202 }
6203 }
6204
6205 return NULL_TREE;
6206 }
6207
6208 /* Handle a "noreturn" attribute; arguments as in
6209 struct attribute_spec.handler. */
6210
6211 static tree
6212 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6213 int ARG_UNUSED (flags), bool *no_add_attrs)
6214 {
6215 tree type = TREE_TYPE (*node);
6216
6217 /* See FIXME comment in c_common_attribute_table. */
6218 if (TREE_CODE (*node) == FUNCTION_DECL)
6219 TREE_THIS_VOLATILE (*node) = 1;
6220 else if (TREE_CODE (type) == POINTER_TYPE
6221 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6222 TREE_TYPE (*node)
6223 = build_pointer_type
6224 (build_type_variant (TREE_TYPE (type),
6225 TYPE_READONLY (TREE_TYPE (type)), 1));
6226 else
6227 {
6228 warning (OPT_Wattributes, "%qs attribute ignored",
6229 IDENTIFIER_POINTER (name));
6230 *no_add_attrs = true;
6231 }
6232
6233 return NULL_TREE;
6234 }
6235
6236 /* Handle a "noinline" attribute; arguments as in
6237 struct attribute_spec.handler. */
6238
6239 static tree
6240 handle_noinline_attribute (tree *node, tree name,
6241 tree ARG_UNUSED (args),
6242 int ARG_UNUSED (flags), bool *no_add_attrs)
6243 {
6244 if (TREE_CODE (*node) == FUNCTION_DECL)
6245 {
6246 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6247 {
6248 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6249 "with attribute %qs", name, "always_inline");
6250 *no_add_attrs = true;
6251 }
6252 else
6253 DECL_UNINLINABLE (*node) = 1;
6254 }
6255 else
6256 {
6257 warning (OPT_Wattributes, "%qE attribute ignored", name);
6258 *no_add_attrs = true;
6259 }
6260
6261 return NULL_TREE;
6262 }
6263
/* Handle a "noclone" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noclone_attribute (tree *node, tree name,
                          tree ARG_UNUSED (args),
                          int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

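/* This is typically paired with noinline to keep a single out-of-line
   copy of a function, e.g. the hypothetical

     extern int my_hook (void) __attribute__ ((noinline, noclone));  */
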
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  if (!TREE_PUBLIC (*node))
    {
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

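/* Only meaningful on externally visible functions, e.g. the hypothetical

     extern int my_compare (const void *, const void *)
       __attribute__ ((leaf));

   promising that the callee reenters the current unit only by returning
   or by raising an exception.  */
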
/* Handle an "always_inline" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                                int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    {
      /* Set the attribute and mark it for disregarding inline limits.  */
      DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
    }
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

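/* A hypothetical declaration carrying the attribute:

     static inline int my_min (int a, int b)
       __attribute__ ((always_inline));

   Such a function is inlined even when inlining limits would otherwise
   reject it.  */
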
/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                         int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

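/* The return type must be a pointer, as checked above, e.g. the
   hypothetical

     extern void *my_alloc (__SIZE_TYPE__) __attribute__ ((malloc));

   which tells the optimizers that the returned pointer does not alias
   any other live object.  */
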
/* Fake handler for attributes we don't properly support.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
                        tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args),
                        int ARG_UNUSED (flags),
                        bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}

/* Handle a "type_generic" attribute.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
                               tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                               bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function.  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}

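/* The assertions above match the shape of the type-generic builtins
   declared in builtins.def, e.g. __builtin_add_overflow, which is
   variadic so that its operand types can vary per call.  */
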
/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
                              int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  *no_add_attrs = true;

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */
  while (POINTER_TYPE_P (type)
         || TREE_CODE (type) == FUNCTION_TYPE
         || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
  if (!vector_type)
    return NULL_TREE;

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, vector_type);

  return NULL_TREE;
}

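/* Continuing the example from the comment above: for a 16-byte vector of
   "short", the loop strips the POINTER_TYPE so the vector type is built
   from HImode units, and reconstruct_complex_type then rebuilds the
   pointer around the new vector type.  */
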
/* Handle a "vector_type" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                              int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = *node;
  tree vector_type;

  *no_add_attrs = true;

  if (TREE_CODE (type) != ARRAY_TYPE)
    {
      error ("attribute %qs applies to array types only",
             IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  vector_type = build_vector_type_for_array (type, name);
  if (!vector_type)
    return NULL_TREE;

  TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
  *node = vector_type;

  return NULL_TREE;
}

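/* From the Ada side this handler is typically reached through a
   Machine_Attribute pragma on a constrained array type, along the lines
   of the hypothetical

      type V4SI is array (1 .. 4) of Integer;
      pragma Machine_Attribute (V4SI, "vector_type");

   which rewrites the array type into the corresponding vector type.  */
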
/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                          *
 * ----------------------------------------------------------------------- */

/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
               const char *name,
               enum built_in_class fnclass,
               tree fntype, tree libtype,
               bool both_p, bool fallback_p,
               bool nonansi_p ATTRIBUTE_UNUSED,
               tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It was most likely set up in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode))
    return;

  if (fntype == error_mark_node)
    return;

  gcc_assert ((!both_p && !fallback_p)
              || !strncmp (name, "__builtin_",
                           strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
                               (fallback_p ? libname : NULL),
                               fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
                          NULL, fnattrs);

  set_builtin_decl (fncode, decl, implicit_p);
}

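/* For illustration, the builtins.def entry for memcpy expands to roughly

     def_builtin_1 (BUILT_IN_MEMCPY, "__builtin_memcpy", BUILT_IN_NORMAL,
                    fntype, libtype, true, true, false, attrs, true);

   with the function and library types and the attribute list elided here;
   both __builtin_memcpy and plain memcpy end up declared, the latter
   falling back to the library implementation.  */
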
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;

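/* These dialect flags are referenced by entries in builtins.def; the C
   language levels never apply to Ada, so they are simply left at zero.  */
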
/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
                    NONANSI_P, ATTRS, IMPLICIT, COND) \
  if (NAME && COND) \
    def_builtin_1 (ENUM, NAME, CLASS, \
                   builtin_types[(int) TYPE], \
                   builtin_types[(int) LIBTYPE], \
                   BOTH_P, FALLBACK_P, NONANSI_P, \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
}

/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about GNAT-specific requirements and adjust their attributes
     accordingly, for instance __builtin_alloca vs. no-throw and
     -fstack-check.  Their generic definitions from builtins.def will then be
     ignored.  */
  build_common_builtin_nodes ();

  /* Now install the target-specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"