9ade57bf44db4efc96f509cd9d3af185816a47b0
[gcc.git] / gcc / ada / gcc-interface / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2018, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "diagnostic.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "toplev.h"
41 #include "output.h"
42 #include "debug.h"
43 #include "convert.h"
44 #include "common/common-target.h"
45 #include "langhooks.h"
46 #include "tree-dump.h"
47 #include "tree-inline.h"
48
49 #include "ada.h"
50 #include "types.h"
51 #include "atree.h"
52 #include "nlists.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes, used by the internal
   attribute table below.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_noinline_attribute (tree *, tree, tree, int, bool *);
static tree handle_noclone_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
105
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req,
       affects_type_identity, handler, exclude } */
  { "const", 0, 0, true, false, false, false,
    handle_const_attribute, NULL },
  { "nothrow", 0, 0, true, false, false, false,
    handle_nothrow_attribute, NULL },
  { "pure", 0, 0, true, false, false, false,
    handle_pure_attribute, NULL },
  { "no vops", 0, 0, true, false, false, false,
    handle_novops_attribute, NULL },
  { "nonnull", 0, -1, false, true, true, false,
    handle_nonnull_attribute, NULL },
  { "sentinel", 0, 1, false, true, true, false,
    handle_sentinel_attribute, NULL },
  { "noreturn", 0, 0, true, false, false, false,
    handle_noreturn_attribute, NULL },
  { "noinline", 0, 0, true, false, false, false,
    handle_noinline_attribute, NULL },
  { "noclone", 0, 0, true, false, false, false,
    handle_noclone_attribute, NULL },
  { "leaf", 0, 0, true, false, false, false,
    handle_leaf_attribute, NULL },
  { "always_inline",0, 0, true, false, false, false,
    handle_always_inline_attribute, NULL },
  { "malloc", 0, 0, true, false, false, false,
    handle_malloc_attribute, NULL },
  { "type generic", 0, 0, false, true, true, false,
    handle_type_generic_attribute, NULL },

  /* The attributes below apply to types (type_req) rather than decls.  */
  { "vector_size", 1, 1, false, true, false, false,
    handle_vector_size_attribute, NULL },
  { "vector_type", 0, 0, false, true, false, false,
    handle_vector_type_attribute, NULL },
  { "may_alias", 0, 0, false, true, false, false, NULL, NULL },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format", 3, 3, false, true, true, false, fake_attribute_handler,
    NULL },
  { "format_arg", 1, 1, false, true, true, false, fake_attribute_handler,
    NULL },

  /* Sentinel entry terminating the table.  */
  { NULL, 0, 0, false, false, false, false, NULL, NULL }
};
155
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* Accessors for the table above; entries are keyed by the distance of
   GNAT_ENTITY from First_Node_Id.  */
#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

/* Accessors for the dummy table, keyed like the ones above.  */
#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
192
/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations; lazily created by
   get_global_context below.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;
223
/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */

struct GTY((for_user)) pad_type_hash
{
  /* The precomputed hash value for TYPE.  */
  hashval_t hash;
  /* The padded type itself.  */
  tree type;
};

struct pad_type_hasher : ggc_cache_ptr_hash<pad_type_hash>
{
  static inline hashval_t hash (pad_type_hash *t) { return t->hash; }
  static bool equal (pad_type_hash *a, pad_type_hash *b);

  /* Keep the entry only as long as its type is still GC-marked.  */
  static int
  keep_cache_entry (pad_type_hash *&t)
  {
    return ggc_marked_p (t->type);
  }
};

static GTY ((cache)) hash_table<pad_type_hasher> *pad_type_hash_table;
246
/* Forward declarations for local helper functions; being static, they are
   defined elsewhere in this file.  */
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree fold_bit_position (const_tree);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);
255
/* Linked list used as a queue to defer the initialization of the DECL_CONTEXT
   of ..._DECL nodes and of the TYPE_CONTEXT of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  /* The ..._DECL node to work on.  */
  tree decl;

  /* The corresponding entity's Scope.  */
  Entity_Id gnat_scope;

  /* The value of force_global when DECL was pushed.  */
  int force_global;

  /* The list of ..._TYPE nodes to propagate the context to.  */
  vec<tree> types;

  /* The next queue item.  */
  struct deferred_decl_context_node *next;
};

/* The head of the queue of deferred context computations.  */
static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
                                       tree type);
288 \f
289 /* Initialize data structures of the utils.c module. */
290
291 void
292 init_gnat_utils (void)
293 {
294 /* Initialize the association of GNAT nodes to GCC trees. */
295 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
296
297 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
298 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
299
300 /* Initialize the hash table of padded types. */
301 pad_type_hash_table = hash_table<pad_type_hasher>::create_ggc (512);
302 }
303
304 /* Destroy data structures of the utils.c module. */
305
306 void
307 destroy_gnat_utils (void)
308 {
309 /* Destroy the association of GNAT nodes to GCC trees. */
310 ggc_free (associate_gnat_to_gnu);
311 associate_gnat_to_gnu = NULL;
312
313 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
314 ggc_free (dummy_node_table);
315 dummy_node_table = NULL;
316
317 /* Destroy the hash table of padded types. */
318 pad_type_hash_table->empty ();
319 pad_type_hash_table = NULL;
320 }
321 \f
322 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
323 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
324 If NO_CHECK is true, the latter check is suppressed.
325
326 If GNU_DECL is zero, reset a previous association. */
327
328 void
329 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
330 {
331 /* Check that GNAT_ENTITY is not already defined and that it is being set
332 to something which is a decl. If that is not the case, this usually
333 means GNAT_ENTITY is defined twice, but occasionally is due to some
334 Gigi problem. */
335 gcc_assert (!(gnu_decl
336 && (PRESENT_GNU_TREE (gnat_entity)
337 || (!no_check && !DECL_P (gnu_decl)))));
338
339 SET_GNU_TREE (gnat_entity, gnu_decl);
340 }
341
342 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
343 that was associated with it. If there is no such tree node, abort.
344
345 In some cases, such as delayed elaboration or expressions that need to
346 be elaborated only once, GNAT_ENTITY is really not an entity. */
347
348 tree
349 get_gnu_tree (Entity_Id gnat_entity)
350 {
351 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
352 return GET_GNU_TREE (gnat_entity);
353 }
354
355 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
356
357 bool
358 present_gnu_tree (Entity_Id gnat_entity)
359 {
360 return PRESENT_GNU_TREE (gnat_entity);
361 }
362 \f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type, debug_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
			? tree_code_for_record_type (gnat_equiv)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  /* Record the dummy type so that it is returned next time around.  */
  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  /* Create a debug type so that debug info consumers only see an unspecified
     type.  */
  if (Needs_Debug_Info (gnat_type))
    {
      debug_type = make_node (LANG_TYPE);
      SET_TYPE_DEBUG_TYPE (gnu_type, debug_type);

      TYPE_NAME (debug_type) = TYPE_NAME (gnu_type);
      TYPE_ARTIFICIAL (debug_type) = TYPE_ARTIFICIAL (gnu_type);
    }

  return gnu_type;
}
407
/* Return the dummy type that was made for GNAT_TYPE, if any, or NULL_TREE
   if none has been made yet.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}
415
/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  /* The dummy bounds template record ("XUB") and a pointer to it.  */
  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  /* The dummy array type ("XUA") and a pointer to it.  */
  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  /* The fat pointer type ("XUP"): a record made of a pointer to the array
     and a pointer to the bounds template.  */
  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  /* The dummy object record type ("XUT").  */
  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  /* Attach the new types to the designated type.  */
  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_REFERENCE_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
459 \f
460 /* Return true if we are in the global binding level. */
461
462 bool
463 global_bindings_p (void)
464 {
465 return force_global || !current_function_decl;
466 }
467
468 /* Enter a new binding level. */
469
470 void
471 gnat_pushlevel (void)
472 {
473 struct gnat_binding_level *newlevel = NULL;
474
475 /* Reuse a struct for this binding level, if there is one. */
476 if (free_binding_level)
477 {
478 newlevel = free_binding_level;
479 free_binding_level = free_binding_level->chain;
480 }
481 else
482 newlevel = ggc_alloc<gnat_binding_level> ();
483
484 /* Use a free BLOCK, if any; otherwise, allocate one. */
485 if (free_block_chain)
486 {
487 newlevel->block = free_block_chain;
488 free_block_chain = BLOCK_CHAIN (free_block_chain);
489 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
490 }
491 else
492 newlevel->block = make_node (BLOCK);
493
494 /* Point the BLOCK we just made to its parent. */
495 if (current_binding_level)
496 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
497
498 BLOCK_VARS (newlevel->block) = NULL_TREE;
499 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
500 TREE_USED (newlevel->block) = 1;
501
502 /* Add this level to the front of the chain (stack) of active levels. */
503 newlevel->chain = current_binding_level;
504 newlevel->jmpbuf_decl = NULL_TREE;
505 current_binding_level = newlevel;
506 }
507
508 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
509 and point FNDECL to this BLOCK. */
510
511 void
512 set_current_block_context (tree fndecl)
513 {
514 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
515 DECL_INITIAL (fndecl) = current_binding_level->block;
516 set_block_for_group (current_binding_level->block);
517 }
518
/* Set the jmpbuf_decl for the current binding level to DECL.  DECL is the
   setjmp buffer that needs to be updated for any variable-sized definition
   within this level; see struct gnat_binding_level.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}
526
/* Get the jmpbuf_decl, if any, for the current binding level.  This is
   NULL_TREE unless set_block_jmpbuf_decl has been called for the level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}
534
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* The variable and subblock lists were built in reverse order; put them
     back into declaration order.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if
     there are no variables, recycle the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent's subblocks.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (!BLOCK_VARS (block))
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      /* Return the empty block to the free chain for reuse.  */
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
572
573 /* Exit a binding level and discard the associated BLOCK. */
574
575 void
576 gnat_zaplevel (void)
577 {
578 struct gnat_binding_level *level = current_binding_level;
579 tree block = level->block;
580
581 BLOCK_CHAIN (block) = free_block_chain;
582 free_block_chain = block;
583
584 /* Free this binding structure. */
585 current_binding_level = level->chain;
586 level->chain = free_binding_level;
587 free_binding_level = level;
588 }
589 \f
/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  /* Walk the chain of parallel types hanging off successive stub decls.  */
  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
	 Some parallel types seem to be present in multiple parallel type
	 chains, so don't mess with their context if they already have one.  */
      if (!TYPE_CONTEXT (parallel_type))
	{
	  if (TYPE_STUB_DECL (parallel_type))
	    DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
	  TYPE_CONTEXT (parallel_type) = context;
	}

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}
616
/* Return the innermost scope, starting at GNAT_NODE, that we would be
   interested in for the debug info, or Empty if there is no such scope.
   If not NULL, set IS_SUBPROGRAM to whether the returned entity is a
   subprogram.  */

Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  /* Only defining identifiers and operator symbols have a Scope we can
     walk up; anything else yields no debug scope.  */
  if (Nkind (gnat_node) == N_Defining_Identifier
      || Nkind (gnat_node) == N_Defining_Operator_Symbol)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  /* Walk the scope chain outwards until an interesting scope is found.  */
  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
	{
	case E_Function:
	case E_Procedure:
	  if (Present (Protected_Body_Subprogram (gnat_entity)))
	    gnat_entity = Protected_Body_Subprogram (gnat_entity);

	  /* If the scope is a subprogram, then just rely on
	     current_function_decl, so that we don't have to defer
	     anything.  This is needed because other places rely on the
	     validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
	  if (is_subprogram)
	    *is_subprogram = true;
	  return gnat_entity;

	case E_Record_Type:
	case E_Record_Subtype:
	  return gnat_entity;

	default:
	  /* By default, we are not interested in this particular scope: go to
	     the outer one.  */
	  break;
	}

      gnat_entity = Scope (gnat_entity);
    }

  return Empty;
}
667
668 /* If N is NULL, set TYPE's context to CONTEXT. Defer this to the processing
669 of N otherwise. */
670
671 static void
672 defer_or_set_type_context (tree type, tree context,
673 struct deferred_decl_context_node *n)
674 {
675 if (n)
676 add_deferred_type_context (n, type);
677 else
678 gnat_set_type_context (type, context);
679 }
680
681 /* Return global_context, but create it first if need be. */
682
683 static tree
684 get_global_context (void)
685 {
686 if (!global_context)
687 {
688 global_context
689 = build_translation_unit_decl (get_identifier (main_input_filename));
690 debug_hooks->register_main_translation_unit (global_context);
691 }
692
693 return global_context;
694 }
695
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fallback to the current_function_decl if
	 the GNAT scope reached the global scope, if it reached a subprogram
	 or the declaration is a subprogram or a variable (for them we skip
	 intermediate context types because the subprogram body elaboration
	 machinery and the inliner both expect a subprogram context).

	 Falling back to current_function_decl is necessary for implicit
	 subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
	= get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
	  && !context_is_subprogram
	  && TREE_CODE (decl) != FUNCTION_DECL
	  && TREE_CODE (decl) != VAR_DECL)
	/* Always assume the scope has not been elaborated, thus defer the
	   context propagation to the time its elaboration will be
	   available.  */
	deferred_decl_context
	  = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
	 local context.  */
      else if (current_function_decl && force_global == 0)
	context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help in determining the context, use the
     global scope.  */
  if (!deferred_decl_context && !context)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context
      && (TREE_CODE (context) == FUNCTION_DECL
	  || decl_function_context (context)))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  /* Suppress warnings for this decl if none was requested on the GNAT side
     or if warnings are turned off for the node.  */
  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
	&& TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      /* External declarations must go to the binding level they belong to.
	 This makes corresponding imported entities available in the
	 debugger at the proper time.  */
      if (DECL_EXTERNAL (decl)
	  && TREE_CODE (decl) == FUNCTION_DECL
	  && DECL_BUILT_IN (decl))
	vec_safe_push (builtin_decls, decl);
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name either if it isn't already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same node, except when the names are
     both derived from source names.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Array and pointer types aren't tagged types in the C sense so we need
	 to generate a typedef in DWARF for them and make sure it is preserved,
	 unless the type is artificial.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
	  && ((TREE_CODE (t) != ARRAY_TYPE && TREE_CODE (t) != POINTER_TYPE)
	      || DECL_ARTIFICIAL (decl)))
	;
      /* For array and pointer types, create the DECL_ORIGINAL_TYPE that will
	 generate the typedef in DWARF.  Also do that for fat pointer types
	 because, even though they are tagged types in the C sense, they are
	 still XUP types attached to the base array type at this point.  */
      else if (!DECL_ARTIFICIAL (decl)
	       && (TREE_CODE (t) == ARRAY_TYPE
		   || TREE_CODE (t) == POINTER_TYPE
		   || TYPE_IS_FAT_POINTER_P (t)))
	{
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  defer_or_set_type_context (tt,
				     DECL_CONTEXT (decl),
				     deferred_decl_context);
	  TREE_TYPE (decl) = tt;
	  if (TYPE_NAME (t)
	      && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	      && DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  /* Array types need to have a name so that they can be related to
	     their GNAT encodings.  */
	  if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NAME (t))
	    TYPE_NAME (t) = DECL_NAME (decl);
	  /* T is left set only when the name must be propagated below.  */
	  t = NULL_TREE;
	}
      else if (TYPE_NAME (t)
	       && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL
	       && DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the variants, this is needed for the type
	 qualifiers machinery to work properly (see check_qualified_type).
	 Also propagate the context to them.  Note that it will be propagated
	 to all parallel types too thanks to gnat_set_type_context.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  /* ??? Because of the previous kludge, we can have variants of fat
	     pointer types with different names.  */
	  if (!(TYPE_IS_FAT_POINTER_P (t)
		&& TYPE_NAME (t)
		&& TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      defer_or_set_type_context (t,
					 DECL_CONTEXT (decl),
					 deferred_decl_context);
	    }
    }
}
858 \f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  /* The record's own address, as a sizetype byte offset expression.  */
  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

		  <--------- pos ---------->
	      {  +------------+-------------+-----------------+
    record  =>{  |############|     ...     | field (type)    |
	      {  +------------+-------------+-----------------+
		 |<-- room -->|<- voffset ->|<---- size ----->|
		  o            o
		  |            |
		  record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  SET_TYPE_ALIGN (record_type, base_align);
  TYPE_USER_ALIGN (record_type) = 1;

  /* Account for the maximum possible padding: ALIGN bits of dynamic offset
     plus the fixed ROOM bytes, on top of the field size itself.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
951
/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type with at most
   MAX_ALIGN alignment if the value is non-zero.  If so, return the new
   type; if not, return the original type.  */

tree
make_packable_type (tree type, bool in_record, unsigned int max_align)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  unsigned int align = TYPE_ALIGN (type);
  unsigned int new_align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  tree new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
      new_size = ceil_pow2 (size);
      new_align = MIN (new_size, BIGGEST_ALIGNMENT);
      SET_TYPE_ALIGN (new_type, new_align);
    }
  else
    {
      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
	  || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
	return type;

      /* Round the RM size up to a unit boundary to get the minimal size
	 for a BLKmode record.  Give up if it's already the size and we
	 don't need to lower the alignment.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size && (max_align == 0 || align <= max_align))
	return type;

      /* Use the largest power of 2 dividing NEW_SIZE as the alignment,
	 capped by BIGGEST_ALIGNMENT and MAX_ALIGN when specified.  */
      new_align = MIN (new_size & -new_size, BIGGEST_ALIGNMENT);
      if (max_align > 0 && new_align > max_align)
	new_align = max_align;
      SET_TYPE_ALIGN (new_type, MIN (align, new_align));
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  tree new_field_list = NULL_TREE;
  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    {
      tree new_field_type = TREE_TYPE (field);
      /* Note: this NEW_SIZE intentionally shadows the outer variable.  */
      tree new_field, new_size;

      /* Recursively make the field types packable as well.  */
      if (RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
	new_field_type = make_packable_type (new_field_type, true, max_align);

      /* However, for the last field in a not already packed record type
	 that is of an aggregate type, we need to use the RM size in the
	 packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (field)
	  && !TYPE_PACKED (type)
	  && RECORD_OR_UNION_TYPE_P (new_field_type)
	  && !TYPE_FAT_POINTER_P (new_field_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
	  && TYPE_ADA_SIZE (new_field_type))
	new_size = TYPE_ADA_SIZE (new_field_type);
      else
	new_size = DECL_SIZE (field);

      new_field
	= create_field_decl (DECL_NAME (field), new_field_type, new_type,
			     new_size, bit_position (field),
			     TYPE_PACKED (type),
			     !DECL_NONADDRESSABLE_P (field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
	DECL_QUALIFIER (new_field) = DECL_QUALIFIER (field);

      /* Fields are chained in reverse order here; nreverse below fixes it.  */
      DECL_CHAIN (new_field) = new_field_list;
      new_field_list = new_field;
    }

  finish_record_type (new_type, nreverse (new_field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
			    DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type) = size_int (new_size / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
		   mode_for_size_tree (TYPE_SIZE (new_type),
				       MODE_INT, 1).else_blk ());

  /* If neither mode nor size nor alignment shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size && max_align == 0)
    return type;

  return new_type;
}
1095
1096 /* Return true if TYPE has an unsigned representation. This needs to be used
1097 when the representation of types whose precision is not equal to their size
1098 is manipulated based on the RM size. */
1099
1100 static inline bool
1101 type_unsigned_for_rm (tree type)
1102 {
1103 /* This is the common case. */
1104 if (TYPE_UNSIGNED (type))
1105 return true;
1106
1107 /* See the E_Signed_Integer_Subtype case of gnat_to_gnu_entity. */
1108 if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1109 && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1110 return true;
1111
1112 return false;
1113 }
1114
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      biased_p |= for_biased;

      /* The type should be an unsigned type if the original type is unsigned
	 or if the lower bound is constant and non-negative or if the type is
	 biased, see E_Signed_Integer_Subtype case of gnat_to_gnu_entity.  */
      if (type_unsigned_for_rm (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  /* Pick an integer mode for the pointer, falling back to the
	     canonical pointer mode if SIZE doesn't yield a valid one.  */
	  scalar_int_mode p_mode;
	  if (!int_mode_for_size (size, 0).exists (&p_mode)
	      || !targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* Fall through: TYPE is already the best type for SIZE.  */
  return type;
}
1202
1203 /* Return true iff the padded types are equivalent. */
1204
1205 bool
1206 pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
1207 {
1208 tree type1, type2;
1209
1210 if (t1->hash != t2->hash)
1211 return 0;
1212
1213 type1 = t1->type;
1214 type2 = t2->type;
1215
1216 /* We consider that the padded types are equivalent if they pad the same type
1217 and have the same size, alignment, RM size and storage order. Taking the
1218 mode into account is redundant since it is determined by the others. */
1219 return
1220 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1221 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1222 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1223 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
1224 && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
1225 }
1226
1227 /* Compute the hash value for the padded TYPE. */
1228
1229 static hashval_t
1230 hash_pad_type (tree type)
1231 {
1232 hashval_t hashcode;
1233
1234 hashcode
1235 = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
1236 hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
1237 hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
1238 hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);
1239
1240 return hashcode;
1241 }
1242
1243 /* Look up the padded TYPE in the hash table and return its canonical version
1244 if it exists; otherwise, insert it into the hash table. */
1245
1246 static tree
1247 canonicalize_pad_type (tree type)
1248 {
1249 const hashval_t hashcode = hash_pad_type (type);
1250 struct pad_type_hash in, *h, **slot;
1251
1252 in.hash = hashcode;
1253 in.type = type;
1254 slot = pad_type_hash_table->find_slot_with_hash (&in, hashcode, INSERT);
1255 h = *slot;
1256 if (!h)
1257 {
1258 h = ggc_alloc<pad_type_hash> ();
1259 h->hash = hashcode;
1260 h->type = type;
1261 *slot = h;
1262 }
1263
1264 return h->type;
1265 }
1266
/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too; in this case,
   the padded type is canonicalized before being returned.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  /* ??? Padding types around packed array implementation types will be
     considered as root types in the array descriptor language hook (see
     gnat_get_array_descr_info).  Give them the original packed array type
     name so that the one coming from sources appears in the debugging
     information.  */
  if (TYPE_IMPL_PACKED_ARRAY_P (type)
      && TYPE_ORIGINAL_PACKED_ARRAY (type)
      && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    TYPE_NAME (record) = TYPE_NAME (TYPE_ORIGINAL_PACKED_ARRAY (type));
  else if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  SET_TYPE_ALIGN (record, align ? align : orig_align);
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* We will output additional debug info manually below.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
	{
	  tree canonical = canonicalize_pad_type (record);
	  if (canonical != record)
	    {
	      /* An equivalent padded type already exists: use it and skip
		 the debug-info generation below.  */
	      record = canonical;
	      goto built;
	    }
	}
    }

  if (gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
    SET_TYPE_DEBUG_TYPE (record, type);

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree name = TYPE_IDENTIFIER (record);
      tree size_unit = TYPE_SIZE_UNIT (record);

      /* A variable that holds the size is required even with no encoding since
	 it will be referenced by debugging information attributes.  At global
	 level, we need a single variable across all translation units.  */
      if (size
	  && TREE_CODE (size) != INTEGER_CST
	  && (definition || global_bindings_p ()))
	{
	  /* Whether or not gnat_entity comes from source, this XVZ variable
	     is a compilation artifact.  */
	  size_unit
	    = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			       size_unit, true, global_bindings_p (),
			       !definition && global_bindings_p (), false,
			       false, true, true, NULL, gnat_entity);
	  TYPE_SIZE_UNIT (record) = size_unit;
	}

      /* There is no need to show what we are a subtype of when outputting as
	 few encodings as possible: regular debugging information makes this
	 redundant.  */
      if (gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
	{
	  tree marker = make_node (RECORD_TYPE);
	  tree orig_name = TYPE_IDENTIFIER (type);

	  /* The XVS marker record carries a single reference-typed field
	     pointing back at the original (unpadded) type.  */
	  TYPE_NAME (marker) = concat_name (name, "XVS");
	  finish_record_type (marker,
			      create_field_decl (orig_name,
						 build_reference_type (type),
						 marker, NULL_TREE, NULL_TREE,
						 0, 0),
			      0, true);
	  TYPE_SIZE_UNIT (marker) = size_unit;

	  add_parallel_type (record, marker);
	}
    }

 built:
  /* If a simple size was explicitly given, maybe issue a warning.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity))
    return record;

  /* But don't do it if we are just annotating types and the type is tagged or
     concurrent, since these types aren't fully laid out in this mode.  */
  if (type_annotate_only)
    {
      Entity_Id gnat_type
	= is_component_type
	  ? Component_Type (gnat_entity) : Etype (gnat_entity);

      if (Is_Tagged_Type (gnat_type) || Is_Concurrent_Type (gnat_type))
	return record;
    }

  /* Take the original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified, but only for aggregate types.  */
  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align && AGGREGATE_TYPE_P (type))
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* For a packed array, post the message on the original array type.  */
      if (Is_Packed_Array_Impl_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
1535
1536 /* Return true if padded TYPE was built with an RM size. */
1537
1538 bool
1539 pad_type_has_rm_size (tree type)
1540 {
1541 /* This is required for the lookup. */
1542 if (!TREE_CONSTANT (TYPE_SIZE (type)))
1543 return false;
1544
1545 const hashval_t hashcode = hash_pad_type (type);
1546 struct pad_type_hash in, *h;
1547
1548 in.hash = hashcode;
1549 in.type = type;
1550 h = pad_type_hash_table->find_with_hash (&in, hashcode);
1551
1552 /* The types built with an RM size are the canonicalized ones. */
1553 return h && h->type == type;
1554 }
1555
1556 /* Return a copy of the padded TYPE but with reverse storage order. */
1557
1558 tree
1559 set_reverse_storage_order_on_pad_type (tree type)
1560 {
1561 if (flag_checking)
1562 {
1563 /* If the inner type is not scalar then the function does nothing. */
1564 tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
1565 gcc_assert (!AGGREGATE_TYPE_P (inner_type)
1566 && !VECTOR_TYPE_P (inner_type));
1567 }
1568
1569 /* This is required for the canonicalization. */
1570 gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
1571
1572 tree field = copy_node (TYPE_FIELDS (type));
1573 type = copy_type (type);
1574 DECL_CONTEXT (field) = type;
1575 TYPE_FIELDS (type) = field;
1576 TYPE_REVERSE_STORAGE_ORDER (type) = 1;
1577 return canonicalize_pad_type (type);
1578 }
1579 \f
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For a multi-dimensional array, first relate the inner dimensions.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
      if (flag_checking || flag_strict_aliasing)
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1654 \f
1655 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1656 ARTIFICIAL_P is true if the type was generated by the compiler. */
1657
1658 void
1659 record_builtin_type (const char *name, tree type, bool artificial_p)
1660 {
1661 tree type_decl = build_decl (input_location,
1662 TYPE_DECL, get_identifier (name), type);
1663 DECL_ARTIFICIAL (type_decl) = artificial_p;
1664 TYPE_ARTIFICIAL (type) = artificial_p;
1665 gnat_pushdecl (type_decl, Empty);
1666
1667 if (debug_hooks->type_decl)
1668 debug_hooks->type_decl (type_decl, false);
1669 }
1670 \f
/* Finish constructing the character type CHAR_TYPE.

  In Ada character types are enumeration types and, as a consequence, are
  represented in the front-end by integral types holding the positions of
  the enumeration values as defined by the language, which means that the
  integral types are unsigned.

  Unfortunately the signedness of 'char' in C is implementation-defined
  and GCC even has the option -fsigned-char to toggle it at run time.
  Since GNAT's philosophy is to be compatible with C by default, to wit
  Interfaces.C.char is defined as a mere copy of Character, we may need
  to declare character types as signed types in GENERIC and generate the
  necessary adjustments to make them behave as unsigned types.

  The overall strategy is as follows: if 'char' is unsigned, do nothing;
  if 'char' is signed, translate character types of CHAR_TYPE_SIZE and
  character subtypes with RM_Size = Esize = CHAR_TYPE_SIZE into signed
  types.  The idea is to ensure that the bit pattern contained in the
  Esize'd objects is not changed, even though the numerical value will
  be interpreted differently depending on the signedness.  */

void
finish_character_type (tree char_type)
{
  /* Nothing to do for unsigned character types, see the strategy above.  */
  if (TYPE_UNSIGNED (char_type))
    return;

  /* Make a copy of a generic unsigned version since we'll modify it.  */
  tree unsigned_char_type
    = (char_type == char_type_node
       ? unsigned_char_type_node
       : copy_type (gnat_unsigned_type_for (char_type)));

  /* Create an unsigned version of the type and set it as debug type.  */
  TYPE_NAME (unsigned_char_type) = TYPE_NAME (char_type);
  TYPE_STRING_FLAG (unsigned_char_type) = TYPE_STRING_FLAG (char_type);
  TYPE_ARTIFICIAL (unsigned_char_type) = TYPE_ARTIFICIAL (char_type);
  SET_TYPE_DEBUG_TYPE (char_type, unsigned_char_type);

  /* If this is a subtype, make the debug type a subtype of the debug type
     of the base type and convert literal RM bounds to unsigned.  */
  if (TREE_TYPE (char_type))
    {
      tree base_unsigned_char_type = TYPE_DEBUG_TYPE (TREE_TYPE (char_type));
      tree min_value = TYPE_RM_MIN_VALUE (char_type);
      tree max_value = TYPE_RM_MAX_VALUE (char_type);

      if (TREE_CODE (min_value) == INTEGER_CST)
	min_value = fold_convert (base_unsigned_char_type, min_value);
      if (TREE_CODE (max_value) == INTEGER_CST)
	max_value = fold_convert (base_unsigned_char_type, max_value);

      TREE_TYPE (unsigned_char_type) = base_unsigned_char_type;
      SET_TYPE_RM_MIN_VALUE (unsigned_char_type, min_value);
      SET_TYPE_RM_MAX_VALUE (unsigned_char_type, max_value);
    }

  /* Adjust the RM bounds of the original type to unsigned; that's especially
     important for types since they are implicit in this case.  */
  SET_TYPE_RM_MIN_VALUE (char_type, TYPE_MIN_VALUE (unsigned_char_type));
  SET_TYPE_RM_MAX_VALUE (char_type, TYPE_MAX_VALUE (unsigned_char_type));
}
1733
1734 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1735 finish constructing the record type as a fat pointer type. */
1736
1737 void
1738 finish_fat_pointer_type (tree record_type, tree field_list)
1739 {
1740 /* Make sure we can put it into a register. */
1741 if (STRICT_ALIGNMENT)
1742 SET_TYPE_ALIGN (record_type, MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE));
1743
1744 /* Show what it really is. */
1745 TYPE_FAT_POINTER_P (record_type) = 1;
1746
1747 /* Do not emit debug info for it since the types of its fields may still be
1748 incomplete at this point. */
1749 finish_record_type (record_type, field_list, 0, false);
1750
1751 /* Force type_contains_placeholder_p to return true on it. Although the
1752 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1753 type but the representation of the unconstrained array. */
1754 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1755 }
1756
1757 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1758 finish constructing the record or union type. If REP_LEVEL is zero, this
1759 record has no representation clause and so will be entirely laid out here.
1760 If REP_LEVEL is one, this record has a representation clause and has been
1761 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1762 this record is derived from a parent record and thus inherits its layout;
1763 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1764 additional debug info needs to be output for this type. */
1765
void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout data were already set on entry, i.e. by a rep
     clause, so that we do not overwrite them below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      SET_TYPE_ALIGN (record_type, MAX (BITS_PER_UNIT,
					TYPE_ALIGN (record_type)));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* For a nested record/union with its own Ada size, use it; otherwise
	 the Ada size of the field is just its GCC size.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size and
		 maximum alignment, if any.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align)
		       && (!TYPE_MAX_ALIGN (record_type)
			   || TYPE_MAX_ALIGN (record_type) >= align))
		{
		  SET_TYPE_ALIGN (record_type, align);
		  SET_DECL_ALIGN (field, MAX (DECL_ALIGN (field), align));
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	SET_TYPE_ALIGN (record_type,
			MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field)));

      /* Accumulate the Ada size and the GCC size of the record according
	 to the kind of aggregate we are laying out.  */
      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order reversed above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  /* Reset the TYPE_MAX_ALIGN field since it's private to gigi.  */
  TYPE_MAX_ALIGN (record_type) = 0;

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1979
/* Append PARALLEL_TYPE on the chain of parallel types of TYPE.  If
   PARALLEL_TYPE has no context and its computation is not deferred yet, also
   propagate TYPE's context to PARALLEL_TYPE's or defer its propagation to the
   moment TYPE will get a context.  */
1984
1985 void
1986 add_parallel_type (tree type, tree parallel_type)
1987 {
1988 tree decl = TYPE_STUB_DECL (type);
1989
1990 while (DECL_PARALLEL_TYPE (decl))
1991 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1992
1993 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1994
1995 /* If PARALLEL_TYPE already has a context, we are done. */
1996 if (TYPE_CONTEXT (parallel_type))
1997 return;
1998
1999 /* Otherwise, try to get one from TYPE's context. If so, simply propagate
2000 it to PARALLEL_TYPE. */
2001 if (TYPE_CONTEXT (type))
2002 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
2003
2004 /* Otherwise TYPE has not context yet. We know it will have one thanks to
2005 gnat_pushdecl and then its context will be propagated to PARALLEL_TYPE,
2006 so we have nothing to do in this case. */
2007 }
2008
2009 /* Return true if TYPE has a parallel type. */
2010
2011 static bool
2012 has_parallel_type (tree type)
2013 {
2014 tree decl = TYPE_STUB_DECL (type);
2015
2016 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
2017 }
2018
2019 /* Wrap up compilation of RECORD_TYPE, i.e. output additional debug info
2020 associated with it. It need not be invoked directly in most cases as
2021 finish_record_type takes care of doing so. */
2022
void
rest_of_record_type_compilation (tree record_type)
{
  bool var_size = false;
  tree field;

  /* If this is a padded type, the bulk of the debug info has already been
     generated for the field's type.  */
  if (TYPE_IS_PADDING_P (record_type))
    return;

  /* If the type already has a parallel type (XVS type), then we're done.  */
  if (has_parallel_type (record_type))
    return;

  for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
    {
      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have a union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
	  /* If a field has a non-constant qualifier, the record will have
	     variable size too.  */
	  || (TREE_CODE (record_type) == QUAL_UNION_TYPE
	      && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
	{
	  var_size = true;
	  break;
	}
    }

  /* If this record type is of variable size, make a parallel record type that
     will tell the debugger how the former is laid out (see exp_dbug.ads).  */
  if (var_size && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
    {
      tree new_record_type
	= make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
		     ? UNION_TYPE : TREE_CODE (record_type));
      tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
      tree last_pos = bitsize_zero_node;
      tree old_field, prev_old_field = NULL_TREE;

      /* The parallel type is named after the original with an XVU (for
	 variant records) or XVE suffix, per the exp_dbug.ads encoding.  */
      new_name
	= concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
		       ? "XVU" : "XVE");
      TYPE_NAME (new_record_type) = new_name;
      SET_TYPE_ALIGN (new_record_type, BIGGEST_ALIGNMENT);
      TYPE_STUB_DECL (new_record_type)
	= create_type_stub_decl (new_name, new_record_type);
      DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	= DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
      gnat_pushdecl (TYPE_STUB_DECL (new_record_type), Empty);
      TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
      TYPE_SIZE_UNIT (new_record_type)
	= size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);

      /* Now scan all the fields, replacing each field with a new field
	 corresponding to the new encoding.  */
      for (old_field = TYPE_FIELDS (record_type); old_field;
	   old_field = DECL_CHAIN (old_field))
	{
	  tree field_type = TREE_TYPE (old_field);
	  tree field_name = DECL_NAME (old_field);
	  tree curpos = fold_bit_position (old_field);
	  tree pos, new_field;
	  bool var = false;
	  unsigned int align = 0;

	  /* See how the position was modified from the last position.

	     There are two basic cases we support: a value was added
	     to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
	     first case first.  If not, see if there is any evidence
	     of rounding.  If so, round the last position and retry.

	     If this is a union, the position can be taken as zero.  */
	  if (TREE_CODE (new_record_type) == UNION_TYPE)
	    pos = bitsize_zero_node;
	  else
	    pos = compute_related_constant (curpos, last_pos);

	  /* curpos is ALIGN * offset: round the last position to that
	     alignment (refined by any factor of OFFSET) and retry.  */
	  if (!pos
	      && TREE_CODE (curpos) == MULT_EXPR
	      && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
	    {
	      tree offset = TREE_OPERAND (curpos, 0);
	      align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align = scale_by_factor_of (offset, align);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  /* curpos is ALIGN * offset + addend: likewise, but the addend
	     further limits the usable alignment to its lowest set bit.  */
	  else if (!pos
		   && TREE_CODE (curpos) == PLUS_EXPR
		   && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
		   && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		   && tree_fits_uhwi_p
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
	    {
	      tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
	      unsigned HOST_WIDE_INT addend
	        = tree_to_uhwi (TREE_OPERAND (curpos, 1));
	      align
		= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
	      align = scale_by_factor_of (offset, align);
	      align = MIN (align, addend & -addend);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }
	  else if (potential_alignment_gap (prev_old_field, old_field, pos))
	    {
	      align = TYPE_ALIGN (field_type);
	      last_pos = round_up (last_pos, align);
	      pos = compute_related_constant (curpos, last_pos);
	    }

	  /* If we can't compute a position, set it to zero.

	     ??? We really should abort here, but it's too much work
	     to get this correct for all cases.  */
	  if (!pos)
	    pos = bitsize_zero_node;

	  /* See if this type is variable-sized and make a pointer type
	     and indicate the indirection if so.  Beware that the debug
	     back-end may adjust the position computed above according
	     to the alignment of the field type, i.e. the pointer type
	     in this case, if we don't preventively counter that.  */
	  if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
	    {
	      field_type = build_pointer_type (field_type);
	      if (align != 0 && TYPE_ALIGN (field_type) > align)
		{
		  field_type = copy_type (field_type);
		  SET_TYPE_ALIGN (field_type, align);
		}
	      var = true;
	    }

	  /* Make a new field name, if necessary.  */
	  if (var || align != 0)
	    {
	      char suffix[16];

	      if (align != 0)
		sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			 align / BITS_PER_UNIT);
	      else
		strcpy (suffix, "XVL");

	      field_name = concat_name (field_name, suffix);
	    }

	  new_field
	    = create_field_decl (field_name, field_type, new_record_type,
				 DECL_SIZE (old_field), pos, 0, 0);
	  /* Prepend and reverse after the loop to keep the field order.  */
	  DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	  TYPE_FIELDS (new_record_type) = new_field;

	  /* If old_field is a QUAL_UNION_TYPE, take its size as being
	     zero.  The only time it's not the last field of the record
	     is when there are other components at fixed positions after
	     it (meaning there was a rep clause for every field) and we
	     want to be able to encode them.  */
	  last_pos = size_binop (PLUS_EXPR, curpos,
				 (TREE_CODE (TREE_TYPE (old_field))
				  == QUAL_UNION_TYPE)
				 ? bitsize_zero_node
				 : DECL_SIZE (old_field));
	  prev_old_field = old_field;
	}

      TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));

      add_parallel_type (record_type, new_record_type);
    }
}
2203
2204 /* Utility function of above to merge LAST_SIZE, the previous size of a record
2205 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2206 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2207 replace a value of zero with the old size. If HAS_REP is true, we take the
2208 MAX of the end position of this field with LAST_SIZE. In all other cases,
2209 we use FIRST_BIT plus SIZE. Return an expression for the size. */
2210
static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  /* Ordinary case: the field ends at FIRST_BIT + SIZE; with a rep clause
     take the MAX with the previous size since fields may be unordered.  */
  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  /* QUAL_UNION_TYPE case: SIZE is a COND_EXPR, so merge each arm
     recursively, replacing a zero arm with the previous size.  */
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			    integer_zerop (TREE_OPERAND (size, 1))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 1),
						       1, has_rep),
			    integer_zerop (TREE_OPERAND (size, 2))
			    ? last_size : merge_sizes (last_size, first_bit,
						       TREE_OPERAND (size, 2),
						       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
2244
2245 /* Return the bit position of FIELD, in bits from the start of the record,
2246 and fold it as much as possible. This is a tree of type bitsizetype. */
2247
2248 static tree
2249 fold_bit_position (const_tree field)
2250 {
2251 tree offset = DECL_FIELD_OFFSET (field);
2252 if (TREE_CODE (offset) == MULT_EXPR || TREE_CODE (offset) == PLUS_EXPR)
2253 offset = size_binop (TREE_CODE (offset),
2254 fold_convert (bitsizetype, TREE_OPERAND (offset, 0)),
2255 fold_convert (bitsizetype, TREE_OPERAND (offset, 1)));
2256 else
2257 offset = fold_convert (bitsizetype, offset);
2258 return size_binop (PLUS_EXPR, DECL_FIELD_BIT_OFFSET (field),
2259 size_binop (MULT_EXPR, offset, bitsize_unit_node));
2260 }
2261
2262 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
2263 related by the addition of a constant. Return that constant if so. */
2264
2265 static tree
2266 compute_related_constant (tree op0, tree op1)
2267 {
2268 tree factor, op0_var, op1_var, op0_cst, op1_cst, result;
2269
2270 if (TREE_CODE (op0) == MULT_EXPR
2271 && TREE_CODE (op1) == MULT_EXPR
2272 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
2273 && TREE_OPERAND (op1, 1) == TREE_OPERAND (op0, 1))
2274 {
2275 factor = TREE_OPERAND (op0, 1);
2276 op0 = TREE_OPERAND (op0, 0);
2277 op1 = TREE_OPERAND (op1, 0);
2278 }
2279 else
2280 factor = NULL_TREE;
2281
2282 op0_cst = split_plus (op0, &op0_var);
2283 op1_cst = split_plus (op1, &op1_var);
2284 result = size_binop (MINUS_EXPR, op0_cst, op1_cst);
2285
2286 if (operand_equal_p (op0_var, op1_var, 0))
2287 return factor ? size_binop (MULT_EXPR, factor, result) : result;
2288
2289 return NULL_TREE;
2290 }
2291
2292 /* Utility function of above to split a tree OP which may be a sum, into a
2293 constant part, which is returned, and a variable part, which is stored
2294 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2295 bitsizetype. */
2296
2297 static tree
2298 split_plus (tree in, tree *pvar)
2299 {
2300 /* Strip conversions in order to ease the tree traversal and maximize the
2301 potential for constant or plus/minus discovery. We need to be careful
2302 to always return and set *pvar to bitsizetype trees, but it's worth
2303 the effort. */
2304 in = remove_conversions (in, false);
2305
2306 *pvar = convert (bitsizetype, in);
2307
2308 if (TREE_CODE (in) == INTEGER_CST)
2309 {
2310 *pvar = bitsize_zero_node;
2311 return convert (bitsizetype, in);
2312 }
2313 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2314 {
2315 tree lhs_var, rhs_var;
2316 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2317 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2318
2319 if (lhs_var == TREE_OPERAND (in, 0)
2320 && rhs_var == TREE_OPERAND (in, 1))
2321 return bitsize_zero_node;
2322
2323 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2324 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2325 }
2326 else
2327 return bitsize_zero_node;
2328 }
2329 \f
2330 /* Return a copy of TYPE but safe to modify in any way. */
2331
2332 tree
2333 copy_type (tree type)
2334 {
2335 tree new_type = copy_node (type);
2336
2337 /* Unshare the language-specific data. */
2338 if (TYPE_LANG_SPECIFIC (type))
2339 {
2340 TYPE_LANG_SPECIFIC (new_type) = NULL;
2341 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2342 }
2343
2344 /* And the contents of the language-specific slot if needed. */
2345 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2346 && TYPE_RM_VALUES (type))
2347 {
2348 TYPE_RM_VALUES (new_type) = NULL_TREE;
2349 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2350 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2351 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2352 }
2353
2354 /* copy_node clears this field instead of copying it, because it is
2355 aliased with TREE_CHAIN. */
2356 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2357
2358 TYPE_POINTER_TO (new_type) = NULL_TREE;
2359 TYPE_REFERENCE_TO (new_type) = NULL_TREE;
2360 TYPE_MAIN_VARIANT (new_type) = new_type;
2361 TYPE_NEXT_VARIANT (new_type) = NULL_TREE;
2362 TYPE_CANONICAL (new_type) = new_type;
2363
2364 return new_type;
2365 }
2366 \f
2367 /* Return a subtype of sizetype with range MIN to MAX and whose
2368 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2369 of the associated TYPE_DECL. */
2370
2371 tree
2372 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2373 {
2374 /* First build a type for the desired range. */
2375 tree type = build_nonshared_range_type (sizetype, min, max);
2376
2377 /* Then set the index type. */
2378 SET_TYPE_INDEX_TYPE (type, index);
2379 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2380
2381 return type;
2382 }
2383
2384 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2385 sizetype is used. */
2386
2387 tree
2388 create_range_type (tree type, tree min, tree max)
2389 {
2390 tree range_type;
2391
2392 if (!type)
2393 type = sizetype;
2394
2395 /* First build a type with the base range. */
2396 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2397 TYPE_MAX_VALUE (type));
2398
2399 /* Then set the actual range. */
2400 SET_TYPE_RM_MIN_VALUE (range_type, min);
2401 SET_TYPE_RM_MAX_VALUE (range_type, max);
2402
2403 return range_type;
2404 }
2405 \f
2406 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of TYPE.
2407 NAME gives the name of the type to be used in the declaration. */
2408
2409 tree
2410 create_type_stub_decl (tree name, tree type)
2411 {
2412 tree type_decl = build_decl (input_location, TYPE_DECL, name, type);
2413 DECL_ARTIFICIAL (type_decl) = 1;
2414 TYPE_ARTIFICIAL (type) = 1;
2415 return type_decl;
2416 }
2417
2418 /* Return a TYPE_DECL node for TYPE. NAME gives the name of the type to be
2419 used in the declaration. ARTIFICIAL_P is true if the declaration was
2420 generated by the compiler. DEBUG_INFO_P is true if we need to write
2421 debug information about this type. GNAT_NODE is used for the position
2422 of the decl. */
2423
2424 tree
2425 create_type_decl (tree name, tree type, bool artificial_p, bool debug_info_p,
2426 Node_Id gnat_node)
2427 {
2428 enum tree_code code = TREE_CODE (type);
2429 bool is_named
2430 = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2431 tree type_decl;
2432
2433 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2434 gcc_assert (!TYPE_IS_DUMMY_P (type));
2435
2436 /* If the type hasn't been named yet, we're naming it; preserve an existing
2437 TYPE_STUB_DECL that has been attached to it for some purpose. */
2438 if (!is_named && TYPE_STUB_DECL (type))
2439 {
2440 type_decl = TYPE_STUB_DECL (type);
2441 DECL_NAME (type_decl) = name;
2442 }
2443 else
2444 type_decl = build_decl (input_location, TYPE_DECL, name, type);
2445
2446 DECL_ARTIFICIAL (type_decl) = artificial_p;
2447 TYPE_ARTIFICIAL (type) = artificial_p;
2448
2449 /* Add this decl to the current binding level. */
2450 gnat_pushdecl (type_decl, gnat_node);
2451
2452 /* If we're naming the type, equate the TYPE_STUB_DECL to the name. This
2453 causes the name to be also viewed as a "tag" by the debug back-end, with
2454 the advantage that no DW_TAG_typedef is emitted for artificial "tagged"
2455 types in DWARF.
2456
2457 Note that if "type" is used as a DECL_ORIGINAL_TYPE, it may be referenced
2458 from multiple contexts, and "type_decl" references a copy of it: in such a
2459 case, do not mess TYPE_STUB_DECL: we do not want to re-use the TYPE_DECL
2460 with the mechanism above. */
2461 if (!is_named && type != DECL_ORIGINAL_TYPE (type_decl))
2462 TYPE_STUB_DECL (type) = type_decl;
2463
2464 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2465 back-end doesn't support, and for others if we don't need to. */
2466 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2467 DECL_IGNORED_P (type_decl) = 1;
2468
2469 return type_decl;
2470 }
2471 \f
2472 /* Return a VAR_DECL or CONST_DECL node.
2473
2474 NAME gives the name of the variable. ASM_NAME is its assembler name
2475 (if provided). TYPE is its data type (a GCC ..._TYPE node). INIT is
2476 the GCC tree for an optional initial expression; NULL_TREE if none.
2477
2478 CONST_FLAG is true if this variable is constant, in which case we might
2479 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2480
2481 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2482 definition to be made visible outside of the current compilation unit, for
2483 instance variable definitions in a package specification.
2484
2485 EXTERN_FLAG is true when processing an external variable declaration (as
2486 opposed to a definition: no storage is to be allocated for the variable).
2487
2488 STATIC_FLAG is only relevant when not at top level and indicates whether
2489 to always allocate storage to the variable.
2490
2491 VOLATILE_FLAG is true if this variable is declared as volatile.
2492
2493 ARTIFICIAL_P is true if the variable was generated by the compiler.
2494
2495 DEBUG_INFO_P is true if we need to write debug information for it.
2496
2497 ATTR_LIST is the list of attributes to be attached to the variable.
2498
2499 GNAT_NODE is used for the position of the decl. */
2500
2501 tree
2502 create_var_decl (tree name, tree asm_name, tree type, tree init,
2503 bool const_flag, bool public_flag, bool extern_flag,
2504 bool static_flag, bool volatile_flag, bool artificial_p,
2505 bool debug_info_p, struct attrib *attr_list,
2506 Node_Id gnat_node, bool const_decl_allowed_p)
2507 {
2508 /* Whether the object has static storage duration, either explicitly or by
2509 virtue of being declared at the global level. */
2510 const bool static_storage = static_flag || global_bindings_p ();
2511
2512 /* Whether the initializer is constant: for an external object or an object
2513 with static storage duration, we check that the initializer is a valid
2514 constant expression for initializing a static variable; otherwise, we
2515 only check that it is constant. */
2516 const bool init_const
2517 = (init
2518 && gnat_types_compatible_p (type, TREE_TYPE (init))
2519 && (extern_flag || static_storage
2520 ? initializer_constant_valid_p (init, TREE_TYPE (init))
2521 != NULL_TREE
2522 : TREE_CONSTANT (init)));
2523
2524 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2525 case the initializer may be used in lieu of the DECL node (as done in
2526 Identifier_to_gnu). This is useful to prevent the need of elaboration
2527 code when an identifier for which such a DECL is made is in turn used
2528 as an initializer. We used to rely on CONST_DECL vs VAR_DECL for this,
2529 but extra constraints apply to this choice (see below) and they are not
2530 relevant to the distinction we wish to make. */
2531 const bool constant_p = const_flag && init_const;
2532
2533 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2534 and may be used for scalars in general but not for aggregates. */
2535 tree var_decl
2536 = build_decl (input_location,
2537 (constant_p
2538 && const_decl_allowed_p
2539 && !AGGREGATE_TYPE_P (type) ? CONST_DECL : VAR_DECL),
2540 name, type);
2541
2542 /* Detect constants created by the front-end to hold 'reference to function
2543 calls for stabilization purposes. This is needed for renaming. */
2544 if (const_flag && init && POINTER_TYPE_P (type))
2545 {
2546 tree inner = init;
2547 if (TREE_CODE (inner) == COMPOUND_EXPR)
2548 inner = TREE_OPERAND (inner, 1);
2549 inner = remove_conversions (inner, true);
2550 if (TREE_CODE (inner) == ADDR_EXPR
2551 && ((TREE_CODE (TREE_OPERAND (inner, 0)) == CALL_EXPR
2552 && !call_is_atomic_load (TREE_OPERAND (inner, 0)))
2553 || (TREE_CODE (TREE_OPERAND (inner, 0)) == VAR_DECL
2554 && DECL_RETURN_VALUE_P (TREE_OPERAND (inner, 0)))))
2555 DECL_RETURN_VALUE_P (var_decl) = 1;
2556 }
2557
2558 /* If this is external, throw away any initializations (they will be done
2559 elsewhere) unless this is a constant for which we would like to remain
2560 able to get the initializer. If we are defining a global here, leave a
2561 constant initialization and save any variable elaborations for the
2562 elaboration routine. If we are just annotating types, throw away the
2563 initialization if it isn't a constant. */
2564 if ((extern_flag && !constant_p)
2565 || (type_annotate_only && init && !TREE_CONSTANT (init)))
2566 init = NULL_TREE;
2567
2568 /* At the global level, a non-constant initializer generates elaboration
2569 statements. Check that such statements are allowed, that is to say,
2570 not violating a No_Elaboration_Code restriction. */
2571 if (init && !init_const && global_bindings_p ())
2572 Check_Elaboration_Code_Allowed (gnat_node);
2573
2574 /* Attach the initializer, if any. */
2575 DECL_INITIAL (var_decl) = init;
2576
2577 /* Directly set some flags. */
2578 DECL_ARTIFICIAL (var_decl) = artificial_p;
2579 DECL_EXTERNAL (var_decl) = extern_flag;
2580
2581 TREE_CONSTANT (var_decl) = constant_p;
2582 TREE_READONLY (var_decl) = const_flag;
2583
2584 /* The object is public if it is external or if it is declared public
2585 and has static storage duration. */
2586 TREE_PUBLIC (var_decl) = extern_flag || (public_flag && static_storage);
2587
2588 /* We need to allocate static storage for an object with static storage
2589 duration if it isn't external. */
2590 TREE_STATIC (var_decl) = !extern_flag && static_storage;
2591
2592 TREE_SIDE_EFFECTS (var_decl)
2593 = TREE_THIS_VOLATILE (var_decl)
2594 = TYPE_VOLATILE (type) | volatile_flag;
2595
2596 if (TREE_SIDE_EFFECTS (var_decl))
2597 TREE_ADDRESSABLE (var_decl) = 1;
2598
2599 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2600 try to fiddle with DECL_COMMON. However, on platforms that don't
2601 support global BSS sections, uninitialized global variables would
2602 go in DATA instead, thus increasing the size of the executable. */
2603 if (!flag_no_common
2604 && TREE_CODE (var_decl) == VAR_DECL
2605 && TREE_PUBLIC (var_decl)
2606 && !have_global_bss_p ())
2607 DECL_COMMON (var_decl) = 1;
2608
2609 /* Do not emit debug info for a CONST_DECL if optimization isn't enabled,
2610 since we will create an associated variable. Likewise for an external
2611 constant whose initializer is not absolute, because this would mean a
2612 global relocation in a read-only section which runs afoul of the PE-COFF
2613 run-time relocation mechanism. */
2614 if (!debug_info_p
2615 || (TREE_CODE (var_decl) == CONST_DECL && !optimize)
2616 || (extern_flag
2617 && constant_p
2618 && init
2619 && initializer_constant_valid_p (init, TREE_TYPE (init))
2620 != null_pointer_node))
2621 DECL_IGNORED_P (var_decl) = 1;
2622
2623 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2624 if (TREE_CODE (var_decl) == VAR_DECL)
2625 process_attributes (&var_decl, &attr_list, true, gnat_node);
2626
2627 /* Add this decl to the current binding level. */
2628 gnat_pushdecl (var_decl, gnat_node);
2629
2630 if (TREE_CODE (var_decl) == VAR_DECL && asm_name)
2631 {
2632 /* Let the target mangle the name if this isn't a verbatim asm. */
2633 if (*IDENTIFIER_POINTER (asm_name) != '*')
2634 asm_name = targetm.mangle_decl_assembler_name (var_decl, asm_name);
2635
2636 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2637 }
2638
2639 return var_decl;
2640 }
2641 \f
2642 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2643
2644 static bool
2645 aggregate_type_contains_array_p (tree type)
2646 {
2647 switch (TREE_CODE (type))
2648 {
2649 case RECORD_TYPE:
2650 case UNION_TYPE:
2651 case QUAL_UNION_TYPE:
2652 {
2653 tree field;
2654 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2655 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2656 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2657 return true;
2658 return false;
2659 }
2660
2661 case ARRAY_TYPE:
2662 return true;
2663
2664 default:
2665 gcc_unreachable ();
2666 }
2667 }
2668
2669 /* Return a FIELD_DECL node. NAME is the field's name, TYPE is its type and
2670 RECORD_TYPE is the type of the enclosing record. If SIZE is nonzero, it
2671 is the specified size of the field. If POS is nonzero, it is the bit
2672 position. PACKED is 1 if the enclosing record is packed, -1 if it has
2673 Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2674 means we are allowed to take the address of the field; if it is negative,
2675 we should not make a bitfield, which is used by make_aligning_type. */
2676
tree
create_field_decl (tree name, tree type, tree record_type, tree size, tree pos,
		   int packed, int addressable)
{
  tree field_decl = build_decl (input_location, FIELD_DECL, name, type);

  /* The field belongs to RECORD_TYPE and inherits read-only-ness from its
     type.  */
  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (type)
		     && aggregate_type_contains_array_p (type))))
    SET_DECL_ALIGN (field_decl, BITS_PER_UNIT);

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (type);
      if (TYPE_MODE (type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* Use the record's alignment if it is smaller than the type's
	     (and nonzero), otherwise the type's alignment.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (type))
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (record_type));
	  else
	    SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
	}
    }

  /* For a field placed by a rep clause, packed-ness follows whether it ended
     up a bitfield; otherwise propagate the record's packed status.  */
  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      SET_DECL_ALIGN (field_decl, bit_align);
    else if (!bit_align && TYPE_ALIGN (type) > DECL_ALIGN (field_decl))
      {
	SET_DECL_ALIGN (field_decl, TYPE_ALIGN (type));
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      /* X & -X isolates the lowest set bit of X.  */
      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
2810 \f
2811 /* Return a PARM_DECL node with NAME and TYPE. */
2812
2813 tree
2814 create_param_decl (tree name, tree type)
2815 {
2816 tree param_decl = build_decl (input_location, PARM_DECL, name, type);
2817
2818 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2819 can lead to various ABI violations. */
2820 if (targetm.calls.promote_prototypes (NULL_TREE)
2821 && INTEGRAL_TYPE_P (type)
2822 && TYPE_PRECISION (type) < TYPE_PRECISION (integer_type_node))
2823 {
2824 /* We have to be careful about biased types here. Make a subtype
2825 of integer_type_node with the proper biasing. */
2826 if (TREE_CODE (type) == INTEGER_TYPE
2827 && TYPE_BIASED_REPRESENTATION_P (type))
2828 {
2829 tree subtype
2830 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2831 TREE_TYPE (subtype) = integer_type_node;
2832 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2833 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (type));
2834 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (type));
2835 type = subtype;
2836 }
2837 else
2838 type = integer_type_node;
2839 }
2840
2841 DECL_ARG_TYPE (param_decl) = type;
2842 return param_decl;
2843 }
2844 \f
2845 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2846 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2847 changed. GNAT_NODE is used for the position of error messages. */
2848
2849 void
2850 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2851 Node_Id gnat_node)
2852 {
2853 struct attrib *attr;
2854
2855 for (attr = *attr_list; attr; attr = attr->next)
2856 switch (attr->type)
2857 {
2858 case ATTR_MACHINE_ATTRIBUTE:
2859 Sloc_to_locus (Sloc (gnat_node), &input_location);
2860 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2861 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2862 break;
2863
2864 case ATTR_LINK_ALIAS:
2865 if (!DECL_EXTERNAL (*node))
2866 {
2867 TREE_STATIC (*node) = 1;
2868 assemble_alias (*node, attr->name);
2869 }
2870 break;
2871
2872 case ATTR_WEAK_EXTERNAL:
2873 if (SUPPORTS_WEAK)
2874 declare_weak (*node);
2875 else
2876 post_error ("?weak declarations not supported on this target",
2877 attr->error_point);
2878 break;
2879
2880 case ATTR_LINK_SECTION:
2881 if (targetm_common.have_named_sections)
2882 {
2883 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2884 DECL_COMMON (*node) = 0;
2885 }
2886 else
2887 post_error ("?section attributes are not supported for this target",
2888 attr->error_point);
2889 break;
2890
2891 case ATTR_LINK_CONSTRUCTOR:
2892 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2893 TREE_USED (*node) = 1;
2894 break;
2895
2896 case ATTR_LINK_DESTRUCTOR:
2897 DECL_STATIC_DESTRUCTOR (*node) = 1;
2898 TREE_USED (*node) = 1;
2899 break;
2900
2901 case ATTR_THREAD_LOCAL_STORAGE:
2902 set_decl_tls_model (*node, decl_default_tls_model (*node));
2903 DECL_COMMON (*node) = 0;
2904 break;
2905 }
2906
2907 *attr_list = NULL;
2908 }
2909
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */
2912
2913 bool
2914 value_factor_p (tree value, HOST_WIDE_INT factor)
2915 {
2916 if (tree_fits_uhwi_p (value))
2917 return tree_to_uhwi (value) % factor == 0;
2918
2919 if (TREE_CODE (value) == MULT_EXPR)
2920 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2921 || value_factor_p (TREE_OPERAND (value, 1), factor));
2922
2923 return false;
2924 }
2925
2926 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2927 from the parameter association for the instantiation of a generic. We do
2928 not want to emit source location for them: the code generated for their
2929 initialization is likely to disturb debugging. */
2930
2931 bool
2932 renaming_from_instantiation_p (Node_Id gnat_node)
2933 {
2934 if (Nkind (gnat_node) != N_Defining_Identifier
2935 || !Is_Object (gnat_node)
2936 || Comes_From_Source (gnat_node)
2937 || !Present (Renamed_Object (gnat_node)))
2938 return false;
2939
2940 /* Get the object declaration of the renamed object, if any and if the
2941 renamed object is a mere identifier. */
2942 gnat_node = Renamed_Object (gnat_node);
2943 if (Nkind (gnat_node) != N_Identifier)
2944 return false;
2945
2946 gnat_node = Entity (gnat_node);
2947 if (!Present (Parent (gnat_node)))
2948 return false;
2949
2950 gnat_node = Parent (gnat_node);
2951 return
2952 (Present (gnat_node)
2953 && Nkind (gnat_node) == N_Object_Declaration
2954 && Present (Corresponding_Generic_Association (gnat_node)));
2955 }
2956
2957 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
2958 feed it with the elaboration of GNAT_SCOPE. */
2959
2960 static struct deferred_decl_context_node *
2961 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2962 {
2963 struct deferred_decl_context_node *new_node;
2964
2965 new_node
2966 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2967 new_node->decl = decl;
2968 new_node->gnat_scope = gnat_scope;
2969 new_node->force_global = force_global;
2970 new_node->types.create (1);
2971 new_node->next = deferred_decl_context_queue;
2972 deferred_decl_context_queue = new_node;
2973 return new_node;
2974 }
2975
2976 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
2977 feed it with the DECL_CONTEXT computed as part of N as soon as it is
2978 computed. */
2979
static void
add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
{
  /* TYPE's TYPE_CONTEXT will be set when N's DECL_CONTEXT is finally
     computed in process_deferred_decl_context.  */
  n->types.safe_push (type);
}
2985
2986 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2987 NULL_TREE if it is not available. */
2988
2989 static tree
2990 compute_deferred_decl_context (Entity_Id gnat_scope)
2991 {
2992 tree context;
2993
2994 if (present_gnu_tree (gnat_scope))
2995 context = get_gnu_tree (gnat_scope);
2996 else
2997 return NULL_TREE;
2998
2999 if (TREE_CODE (context) == TYPE_DECL)
3000 {
3001 const tree context_type = TREE_TYPE (context);
3002
3003 /* Skip dummy types: only the final ones can appear in the context
3004 chain. */
3005 if (TYPE_DUMMY_P (context_type))
3006 return NULL_TREE;
3007
3008 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
3009 chain. */
3010 else
3011 context = context_type;
3012 }
3013
3014 return context;
3015 }
3016
3017 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
3018 that cannot be processed yet, remove the other ones. If FORCE is true,
3019 force the processing for all nodes, use the global context when nodes don't
3020 have a GNU translation. */
3021
void
process_deferred_decl_context (bool force)
{
  /* IT always points to the link holding the current node, so that the
     node can be unlinked in place when it is processed.  */
  struct deferred_decl_context_node **it = &deferred_decl_context_queue;
  struct deferred_decl_context_node *node;

  while (*it)
    {
      bool processed = false;
      tree context = NULL_TREE;
      Entity_Id gnat_scope;

      node = *it;

      /* If FORCE, get the innermost elaborated scope.  Otherwise, just try to
	 get the first scope.  */
      gnat_scope = node->gnat_scope;
      while (Present (gnat_scope))
	{
	  context = compute_deferred_decl_context (gnat_scope);
	  if (!force || context)
	    break;
	  gnat_scope = get_debug_scope (gnat_scope, NULL);
	}

      /* Imported declarations must not be in a local context (i.e. not inside
	 a function).  Walk up the chain and assert none of it is a function
	 decl.  */
      if (context && node->force_global > 0)
	{
	  tree ctx = context;

	  while (ctx)
	    {
	      gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
	      ctx = DECL_P (ctx) ? DECL_CONTEXT (ctx) : TYPE_CONTEXT (ctx);
	    }
	}

      /* If FORCE, we want to get rid of all nodes in the queue: in case there
	 was no elaborated scope, use the global context.  */
      if (force && !context)
	context = get_global_context ();

      if (context)
	{
	  tree t;
	  int i;

	  DECL_CONTEXT (node->decl) = context;

	  /* Propagate it to the TYPE_CONTEXT attributes of the requested
	     ..._TYPE nodes.  */
	  FOR_EACH_VEC_ELT (node->types, i, t)
	    {
	      gnat_set_type_context (t, context);
	    }
	  processed = true;
	}

      /* If this node has been successfully processed, remove it from the
	 queue.  Then move to the next node.  */
      if (processed)
	{
	  *it = node->next;
	  node->types.release ();
	  free (node);
	}
      else
	it = &node->next;
    }
}
3093
3094 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
3095
static unsigned int
scale_by_factor_of (tree expr, unsigned int value)
{
  unsigned HOST_WIDE_INT addend = 0;
  unsigned HOST_WIDE_INT factor = 1;

  /* Peel conversions around EXPR and try to extract bodies from function
     calls: it is possible to get the scale factor from size functions.  */
  expr = remove_conversions (expr, true);
  if (TREE_CODE (expr) == CALL_EXPR)
    expr = maybe_inline_call_in_expr (expr);

  /* Sometimes we get PLUS_EXPR (BIT_AND_EXPR (..., X), Y), where Y is a
     multiple of the scale factor we are looking for.  Record the addend
     and continue with the first operand.  */
  if (TREE_CODE (expr) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
      && tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
    {
      addend = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      expr = TREE_OPERAND (expr, 0);
    }

  /* An expression which is a bitwise AND with a mask has a power-of-2 factor
     corresponding to the number of trailing zeros of the mask.  */
  if (TREE_CODE (expr) == BIT_AND_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
      unsigned int i = 0;

      /* Count trailing zeros; the bound on I guards against an all-zero
	 mask, which would otherwise loop forever.  */
      while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
	{
	  mask >>= 1;
	  factor *= 2;
	  i++;
	}
    }

  /* If the addend is not a multiple of the factor we found, give up.  In
     theory we could find a smaller common factor but it's useless for our
     needs.  This situation arises when dealing with a field F1 with no
     alignment requirement but that is following a field F2 with such
     requirements.  As long as we have F2's offset, we don't need alignment
     information to compute F1's.  */
  if (addend % factor != 0)
    factor = 1;

  return factor * value;
}
3145
/* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */
3151
static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && tree_fits_uhwi_p (offset))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
      && tree_fits_uhwi_p (bit_position (prev_field)))
    return ((tree_to_uhwi (bit_position (prev_field))
	     + tree_to_uhwi (DECL_SIZE (prev_field)))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap.  */
  return true;
}
3191
3192 /* Return a LABEL_DECL with NAME. GNAT_NODE is used for the position of
3193 the decl. */
3194
3195 tree
3196 create_label_decl (tree name, Node_Id gnat_node)
3197 {
3198 tree label_decl
3199 = build_decl (input_location, LABEL_DECL, name, void_type_node);
3200
3201 SET_DECL_MODE (label_decl, VOIDmode);
3202
3203 /* Add this decl to the current binding level. */
3204 gnat_pushdecl (label_decl, gnat_node);
3205
3206 return label_decl;
3207 }
3208 \f
3209 /* Return a FUNCTION_DECL node. NAME is the name of the subprogram, ASM_NAME
3210 its assembler name, TYPE its type (a FUNCTION_TYPE node), PARAM_DECL_LIST
3211 the list of its parameters (a list of PARM_DECL nodes chained through the
3212 DECL_CHAIN field).
3213
3214 INLINE_STATUS describes the inline flags to be set on the FUNCTION_DECL.
3215
3216 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
3217 definition to be made visible outside of the current compilation unit.
3218
3219 EXTERN_FLAG is true when processing an external subprogram declaration.
3220
3221 ARTIFICIAL_P is true if the subprogram was generated by the compiler.
3222
3223 DEBUG_INFO_P is true if we need to write debug information for it.
3224
3225 DEFINITION is true if the subprogram is to be considered as a definition.
3226
3227 ATTR_LIST is the list of attributes to be attached to the subprogram.
3228
3229 GNAT_NODE is used for the position of the decl. */
3230
tree
create_subprog_decl (tree name, tree asm_name, tree type, tree param_decl_list,
		     enum inline_status_t inline_status, bool public_flag,
		     bool extern_flag, bool artificial_p, bool debug_info_p,
		     bool definition, struct attrib *attr_list,
		     Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, name, type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (subprog_decl) = artificial_p;
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;

  if (!debug_info_p)
    DECL_IGNORED_P (subprog_decl) = 1;
  if (definition)
    DECL_FUNCTION_IS_DEF (subprog_decl) = 1;

  /* Translate the inlining status into the relevant DECL flags.  */
  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_required:
      if (Back_End_Inlining)
	{
	  decl_attributes (&subprog_decl,
			   tree_cons (get_identifier ("always_inline"),
				      NULL_TREE, NULL_TREE),
			   ATTR_FLAG_TYPE_IN_PLACE);

	  /* Inline_Always guarantees that every direct call is inlined and
	     that there is no indirect reference to the subprogram, so the
	     instance in the original package (as well as its clones in the
	     client packages created for inter-unit inlining) can be made
	     private, which causes the out-of-line body to be eliminated.  */
	  TREE_PUBLIC (subprog_decl) = 0;
	}

      /* ... fall through ... */

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      /* Do not warn if the compiler itself generated the subprogram.  */
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_p;
      break;

    default:
      gcc_unreachable ();
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Once everything is processed, finish the subprogram declaration.  */
  finish_subprog_decl (subprog_decl, asm_name, type);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
3299
3300 /* Given a subprogram declaration DECL, its assembler name and its type,
3301 finish constructing the subprogram declaration from ASM_NAME and TYPE. */
3302
void
finish_subprog_decl (tree decl, tree asm_name, tree type)
{
  /* Create the RESULT_DECL that will carry the return value.  */
  tree result_decl
    = build_decl (DECL_SOURCE_LOCATION (decl), RESULT_DECL, NULL_TREE,
		  TREE_TYPE (type));

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  /* The result is returned by invisible reference when TREE_ADDRESSABLE is
     set on the function type.  */
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (type);
  DECL_RESULT (decl) = result_decl;

  /* Propagate the qualifiers of the function type onto the decl.  */
  TREE_READONLY (decl) = TYPE_READONLY (type);
  TREE_SIDE_EFFECTS (decl) = TREE_THIS_VOLATILE (decl) = TYPE_VOLATILE (type);

  if (asm_name)
    {
      /* Let the target mangle the name if this isn't a verbatim asm.  */
      if (*IDENTIFIER_POINTER (asm_name) != '*')
	asm_name = targetm.mangle_decl_assembler_name (decl, asm_name);

      SET_DECL_ASSEMBLER_NAME (decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (decl) = main_identifier_node;
    }
}
3336 \f
3337 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3338 body. This routine needs to be invoked before processing the declarations
3339 appearing in the subprogram. */
3340
3341 void
3342 begin_subprog_body (tree subprog_decl)
3343 {
3344 tree param_decl;
3345
3346 announce_function (subprog_decl);
3347
3348 /* This function is being defined. */
3349 TREE_STATIC (subprog_decl) = 1;
3350
3351 /* The failure of this assertion will likely come from a wrong context for
3352 the subprogram body, e.g. another procedure for a procedure declared at
3353 library level. */
3354 gcc_assert (current_function_decl == decl_function_context (subprog_decl));
3355
3356 current_function_decl = subprog_decl;
3357
3358 /* Enter a new binding level and show that all the parameters belong to
3359 this function. */
3360 gnat_pushlevel ();
3361
3362 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3363 param_decl = DECL_CHAIN (param_decl))
3364 DECL_CONTEXT (param_decl) = subprog_decl;
3365
3366 make_decl_rtl (subprog_decl);
3367 }
3368
3369 /* Finish translating the current subprogram and set its BODY. */
3370
void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one; in
     that case, prefer its BLOCK over the one attached above.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function, if any, as the current one.  */
  current_function_decl = decl_function_context (fndecl);
}
3395
3396 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3397
3398 void
3399 rest_of_subprog_body_compilation (tree subprog_decl)
3400 {
3401 /* We cannot track the location of errors past this point. */
3402 error_gnat_node = Empty;
3403
3404 /* If we're only annotating types, don't actually compile this function. */
3405 if (type_annotate_only)
3406 return;
3407
3408 /* Dump functions before gimplification. */
3409 dump_function (TDI_original, subprog_decl);
3410
3411 if (!decl_function_context (subprog_decl))
3412 cgraph_node::finalize_function (subprog_decl, false);
3413 else
3414 /* Register this function with cgraph just far enough to get it
3415 added to our parent's nested function list. */
3416 (void) cgraph_node::get_create (subprog_decl);
3417 }
3418
/* Register DECL as a built-in function by adding it to the global binding
   level, then return it.  NOTE(review): presumably installed as the
   builtin_function langhook -- confirm in gcc-interface/misc.c.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
3425
3426 /* Return an integer type with the number of bits of precision given by
3427 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3428 it is a signed type. */
3429
3430 tree
3431 gnat_type_for_size (unsigned precision, int unsignedp)
3432 {
3433 tree t;
3434 char type_name[20];
3435
3436 if (precision <= 2 * MAX_BITS_PER_WORD
3437 && signed_and_unsigned_types[precision][unsignedp])
3438 return signed_and_unsigned_types[precision][unsignedp];
3439
3440 if (unsignedp)
3441 t = make_unsigned_type (precision);
3442 else
3443 t = make_signed_type (precision);
3444 TYPE_ARTIFICIAL (t) = 1;
3445
3446 if (precision <= 2 * MAX_BITS_PER_WORD)
3447 signed_and_unsigned_types[precision][unsignedp] = t;
3448
3449 if (!TYPE_NAME (t))
3450 {
3451 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3452 TYPE_NAME (t) = get_identifier (type_name);
3453 }
3454
3455 return t;
3456 }
3457
3458 /* Likewise for floating-point types. */
3459
3460 static tree
3461 float_type_for_precision (int precision, machine_mode mode)
3462 {
3463 tree t;
3464 char type_name[20];
3465
3466 if (float_types[(int) mode])
3467 return float_types[(int) mode];
3468
3469 float_types[(int) mode] = t = make_node (REAL_TYPE);
3470 TYPE_PRECISION (t) = precision;
3471 layout_type (t);
3472
3473 gcc_assert (TYPE_MODE (t) == mode);
3474 if (!TYPE_NAME (t))
3475 {
3476 sprintf (type_name, "FLOAT_%d", precision);
3477 TYPE_NAME (t) = get_identifier (type_name);
3478 }
3479
3480 return t;
3481 }
3482
3483 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3484 an unsigned type; otherwise a signed type is returned. */
3485
3486 tree
3487 gnat_type_for_mode (machine_mode mode, int unsignedp)
3488 {
3489 if (mode == BLKmode)
3490 return NULL_TREE;
3491
3492 if (mode == VOIDmode)
3493 return void_type_node;
3494
3495 if (COMPLEX_MODE_P (mode))
3496 return NULL_TREE;
3497
3498 scalar_float_mode float_mode;
3499 if (is_a <scalar_float_mode> (mode, &float_mode))
3500 return float_type_for_precision (GET_MODE_PRECISION (float_mode),
3501 float_mode);
3502
3503 scalar_int_mode int_mode;
3504 if (is_a <scalar_int_mode> (mode, &int_mode))
3505 return gnat_type_for_size (GET_MODE_BITSIZE (int_mode), unsignedp);
3506
3507 if (VECTOR_MODE_P (mode))
3508 {
3509 machine_mode inner_mode = GET_MODE_INNER (mode);
3510 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3511 if (inner_type)
3512 return build_vector_type_for_mode (inner_type, mode);
3513 }
3514
3515 return NULL_TREE;
3516 }
3517
3518 /* Return the signed or unsigned version of TYPE_NODE, a scalar type, the
3519 signedness being specified by UNSIGNEDP. */
3520
3521 tree
3522 gnat_signed_or_unsigned_type_for (int unsignedp, tree type_node)
3523 {
3524 if (type_node == char_type_node)
3525 return unsignedp ? unsigned_char_type_node : signed_char_type_node;
3526
3527 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), unsignedp);
3528
3529 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3530 {
3531 type = copy_type (type);
3532 TREE_TYPE (type) = type_node;
3533 }
3534 else if (TREE_TYPE (type_node)
3535 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3536 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3537 {
3538 type = copy_type (type);
3539 TREE_TYPE (type) = TREE_TYPE (type_node);
3540 }
3541
3542 return type;
3543 }
3544
3545 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3546 transparently converted to each other. */
3547
3548 int
3549 gnat_types_compatible_p (tree t1, tree t2)
3550 {
3551 enum tree_code code;
3552
3553 /* This is the default criterion. */
3554 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3555 return 1;
3556
3557 /* We only check structural equivalence here. */
3558 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3559 return 0;
3560
3561 /* Vector types are also compatible if they have the same number of subparts
3562 and the same form of (scalar) element type. */
3563 if (code == VECTOR_TYPE
3564 && known_eq (TYPE_VECTOR_SUBPARTS (t1), TYPE_VECTOR_SUBPARTS (t2))
3565 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3566 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3567 return 1;
3568
3569 /* Array types are also compatible if they are constrained and have the same
3570 domain(s), the same component type and the same scalar storage order. */
3571 if (code == ARRAY_TYPE
3572 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3573 || (TYPE_DOMAIN (t1)
3574 && TYPE_DOMAIN (t2)
3575 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3576 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3577 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3578 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3579 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3580 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3581 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
3582 && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
3583 return 1;
3584
3585 return 0;
3586 }
3587
3588 /* Return true if EXPR is a useless type conversion. */
3589
3590 bool
3591 gnat_useless_type_conversion (tree expr)
3592 {
3593 if (CONVERT_EXPR_P (expr)
3594 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3595 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3596 return gnat_types_compatible_p (TREE_TYPE (expr),
3597 TREE_TYPE (TREE_OPERAND (expr, 0)));
3598
3599 return false;
3600 }
3601
3602 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3603
3604 bool
3605 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3606 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3607 {
3608 return TYPE_CI_CO_LIST (t) == cico_list
3609 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3610 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3611 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3612 }
3613 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   The function walks EXP recursively, flipping MAX_P where an operation
   is antitone in its operand (NEGATE_EXPR, the RHS of MINUS_EXPR, ...),
   and returns EXP itself whenever no operand was changed, so unmodified
   subtrees are shared rather than rebuilt.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);
  tree op0, op1, op2;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Declarations and constants contain no discriminant reference.  */
      return exp;

    case tcc_exceptional:
      gcc_assert (code == SSA_NAME);
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to inline the call first; if that works, process the
	     inlined body instead of the call.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument processed.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (CONTAINS_PLACEHOLDER_P (exp))
	{
	  /* Replace the discriminant reference with the extremum of its
	     type, taken in the base type to avoid constraint issues.  */
	  tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
	  tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
	  return
	    convert (type,
		     max_size (convert (get_base_type (val_type), val), true));
	}

      return exp;

    case tcc_comparison:
      /* A comparison is bounded by true (1) from above and false (0)
	 from below.  */
      return build_int_cst (type, max_p ? 1 : 0);

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      /* Negation is antitone: flip MAX_P for the operand.  */
      op0 = max_size (TREE_OPERAND (exp, 0),
		      code == NEGATE_EXPR ? !max_p : max_p);

      /* Share the original node if nothing changed.  */
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      return fold_build1 (code, type, op0);

    case tcc_binary:
      {
	/* Subtraction is antitone in its right operand.  */
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && TREE_CODE (rhs) != INTEGER_CST)
	  return lhs;

	/* If we are going to subtract a "negative" value in an unsigned type,
	   do the operation as an addition of the negated value, in order to
	   avoid creating a spurious overflow below.  */
	if (code == MINUS_EXPR
	    && TYPE_UNSIGNED (type)
	    && TREE_CODE (rhs) == INTEGER_CST
	    && !TREE_OVERFLOW (rhs)
	    && tree_int_cst_sign_bit (rhs) != 0)
	  {
	    rhs = fold_build1 (NEGATE_EXPR, type, rhs);
	    code = PLUS_EXPR;
	  }

	/* Share the original node if neither operand changed.  */
	if (lhs == TREE_OPERAND (exp, 0) && rhs == TREE_OPERAND (exp, 1))
	  return exp;

	/* We need to detect overflows so we call size_binop here.  */
	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  /* A SAVE_EXPR must be processed exactly once, so leave it be.  */
	  if (code == SAVE_EXPR)
	    return exp;

	  /* Logical negation is antitone: flip MAX_P for the operand.  */
	  op0 = max_size (TREE_OPERAND (exp, 0),
			  code == TRUTH_NOT_EXPR ? !max_p : max_p);

	  if (op0 == TREE_OPERAND (exp, 0))
	    return exp;

	  return fold_build1 (code, type, op0);

	case 2:
	  /* Only the value (second) operand of a COMPOUND_EXPR matters.  */
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  op0 = max_size (TREE_OPERAND (exp, 0), max_p);
	  op1 = max_size (TREE_OPERAND (exp, 1), max_p);

	  if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	    return exp;

	  return fold_build2 (code, type, op0, op1);

	case 3:
	  if (code == COND_EXPR)
	    {
	      op1 = TREE_OPERAND (exp, 1);
	      op2 = TREE_OPERAND (exp, 2);

	      if (!op1 || !op2)
		return exp;

	      /* The extremum of a conditional is the extremum over both
		 arms, regardless of the condition.  */
	      return
		fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
			     max_size (op1, max_p), max_size (op2, max_p));
	    }
	  break;

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
3781 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.

   TEMPLATE_TYPE is a record whose fields come in (low bound, high bound)
   pairs, one pair per dimension; the loop below fills one pair per
   iteration.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to get at the array.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* Each iteration consumes one (min, max) field pair of the template and
     advances either the bound list or the nested array type, whichever is
     driving the bounds.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      /* Convert each bound to the type of the corresponding template
	 field (FIELD is the low bound, its successor the high bound).  */
      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3844 \f
3845 /* Return true if TYPE is suitable for the element type of a vector. */
3846
3847 static bool
3848 type_for_vector_element_p (tree type)
3849 {
3850 machine_mode mode;
3851
3852 if (!INTEGRAL_TYPE_P (type)
3853 && !SCALAR_FLOAT_TYPE_P (type)
3854 && !FIXED_POINT_TYPE_P (type))
3855 return false;
3856
3857 mode = TYPE_MODE (type);
3858 if (GET_MODE_CLASS (mode) != MODE_INT
3859 && !SCALAR_FLOAT_MODE_P (mode)
3860 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3861 return false;
3862
3863 return true;
3864 }
3865
3866 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3867 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3868 attribute declaration and want to issue error messages on failure. */
3869
3870 static tree
3871 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3872 {
3873 unsigned HOST_WIDE_INT size_int, inner_size_int;
3874 int nunits;
3875
3876 /* Silently punt on variable sizes. We can't make vector types for them,
3877 need to ignore them on front-end generated subtypes of unconstrained
3878 base types, and this attribute is for binding implementors, not end
3879 users, so we should never get there from legitimate explicit uses. */
3880 if (!tree_fits_uhwi_p (size))
3881 return NULL_TREE;
3882 size_int = tree_to_uhwi (size);
3883
3884 if (!type_for_vector_element_p (inner_type))
3885 {
3886 if (attribute)
3887 error ("invalid element type for attribute %qs",
3888 IDENTIFIER_POINTER (attribute));
3889 return NULL_TREE;
3890 }
3891 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3892
3893 if (size_int % inner_size_int)
3894 {
3895 if (attribute)
3896 error ("vector size not an integral multiple of component size");
3897 return NULL_TREE;
3898 }
3899
3900 if (size_int == 0)
3901 {
3902 if (attribute)
3903 error ("zero vector size");
3904 return NULL_TREE;
3905 }
3906
3907 nunits = size_int / inner_size_int;
3908 if (nunits & (nunits - 1))
3909 {
3910 if (attribute)
3911 error ("number of components of vector not a power of two");
3912 return NULL_TREE;
3913 }
3914
3915 return build_vector_type (inner_type, nunits);
3916 }
3917
3918 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3919 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3920 processing the attribute and want to issue error messages on failure. */
3921
3922 static tree
3923 build_vector_type_for_array (tree array_type, tree attribute)
3924 {
3925 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3926 TYPE_SIZE_UNIT (array_type),
3927 attribute);
3928 if (!vector_type)
3929 return NULL_TREE;
3930
3931 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3932 return vector_type;
3933 }
3934 \f
3935 /* Build a type to be used to represent an aliased object whose nominal type
3936 is an unconstrained array. This consists of a RECORD_TYPE containing a
3937 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3938 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3939 an arbitrary unconstrained object. Use NAME as the name of the record.
3940 DEBUG_INFO_P is true if we need to write debug information for the type. */
3941
3942 tree
3943 build_unc_object_type (tree template_type, tree object_type, tree name,
3944 bool debug_info_p)
3945 {
3946 tree decl;
3947 tree type = make_node (RECORD_TYPE);
3948 tree template_field
3949 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3950 NULL_TREE, NULL_TREE, 0, 1);
3951 tree array_field
3952 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3953 NULL_TREE, NULL_TREE, 0, 1);
3954
3955 TYPE_NAME (type) = name;
3956 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3957 DECL_CHAIN (template_field) = array_field;
3958 finish_record_type (type, template_field, 0, true);
3959
3960 /* Declare it now since it will never be declared otherwise. This is
3961 necessary to ensure that its subtrees are properly marked. */
3962 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3963
3964 /* template_type will not be used elsewhere than here, so to keep the debug
3965 info clean and in order to avoid scoping issues, make decl its
3966 context. */
3967 gnat_set_type_context (template_type, decl);
3968
3969 return type;
3970 }
3971
3972 /* Same, taking a thin or fat pointer type instead of a template type. */
3973
3974 tree
3975 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3976 tree name, bool debug_info_p)
3977 {
3978 tree template_type;
3979
3980 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3981
3982 template_type
3983 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3984 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3985 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3986
3987 return
3988 build_unc_object_type (template_type, object_type, name, debug_info_p);
3989 }
3990 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  Walk every pointer in the chain and every
	 variant of each pointer, re-aiming it at NEW_TYPE.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    /* Keep the type of the cached null-bounds fat pointer, if any,
	       in sync as well.  */
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* OLD_TYPE's pointer and reference chains were spliced into
	 NEW_TYPE's above, so detach them from OLD_TYPE.  */
      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* Redirect the array pointer (first field of the fat pointer).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Redirect the template pointer (second field).  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* Redirect pointers to the object record (thin pointers).  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }
}
4121 \f
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.

   TYPE is the target fat pointer type, a record with two fields: a pointer
   to the array data and a pointer to the template (bounds).  The result is
   a CONSTRUCTOR for that record.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the cached null-bounds object for this template pointer
	 type if one was already built.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below, so protect it from double evaluation.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  /* Subtract the offset of the data field to recover the address
	     of the template at the start of the record.  */
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, field, false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
4225 \f
4226 /* Create an expression whose value is that of EXPR,
4227 converted to type TYPE. The TREE_TYPE of the value
4228 is always TYPE. This function implements all reasonable
4229 conversions; callers should filter out those that are
4230 not permitted by the language being compiled. */
4231
4232 tree
4233 convert (tree type, tree expr)
4234 {
4235 tree etype = TREE_TYPE (expr);
4236 enum tree_code ecode = TREE_CODE (etype);
4237 enum tree_code code = TREE_CODE (type);
4238
4239 /* If the expression is already of the right type, we are done. */
4240 if (etype == type)
4241 return expr;
4242
4243 /* If both input and output have padding and are of variable size, do this
4244 as an unchecked conversion. Likewise if one is a mere variant of the
4245 other, so we avoid a pointless unpad/repad sequence. */
4246 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4247 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4248 && (!TREE_CONSTANT (TYPE_SIZE (type))
4249 || !TREE_CONSTANT (TYPE_SIZE (etype))
4250 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4251 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4252 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4253 ;
4254
4255 /* If the output type has padding, convert to the inner type and make a
4256 constructor to build the record, unless a variable size is involved. */
4257 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4258 {
4259 /* If we previously converted from another type and our type is
4260 of variable size, remove the conversion to avoid the need for
4261 variable-sized temporaries. Likewise for a conversion between
4262 original and packable version. */
4263 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4264 && (!TREE_CONSTANT (TYPE_SIZE (type))
4265 || (ecode == RECORD_TYPE
4266 && TYPE_NAME (etype)
4267 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4268 expr = TREE_OPERAND (expr, 0);
4269
4270 /* If we are just removing the padding from expr, convert the original
4271 object if we have variable size in order to avoid the need for some
4272 variable-sized temporaries. Likewise if the padding is a variant
4273 of the other, so we avoid a pointless unpad/repad sequence. */
4274 if (TREE_CODE (expr) == COMPONENT_REF
4275 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4276 && (!TREE_CONSTANT (TYPE_SIZE (type))
4277 || TYPE_MAIN_VARIANT (type)
4278 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4279 || (ecode == RECORD_TYPE
4280 && TYPE_NAME (etype)
4281 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4282 return convert (type, TREE_OPERAND (expr, 0));
4283
4284 /* If the inner type is of self-referential size and the expression type
4285 is a record, do this as an unchecked conversion unless both types are
4286 essentially the same. But first pad the expression if possible to
4287 have the same size on both sides. */
4288 if (ecode == RECORD_TYPE
4289 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type)))
4290 && TYPE_MAIN_VARIANT (etype)
4291 != TYPE_MAIN_VARIANT (TREE_TYPE (TYPE_FIELDS (type))))
4292 {
4293 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4294 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4295 false, false, false, true),
4296 expr);
4297 return unchecked_convert (type, expr, false);
4298 }
4299
4300 /* If we are converting between array types with variable size, do the
4301 final conversion as an unchecked conversion, again to avoid the need
4302 for some variable-sized temporaries. If valid, this conversion is
4303 very likely purely technical and without real effects. */
4304 if (ecode == ARRAY_TYPE
4305 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4306 && !TREE_CONSTANT (TYPE_SIZE (etype))
4307 && !TREE_CONSTANT (TYPE_SIZE (type)))
4308 return unchecked_convert (type,
4309 convert (TREE_TYPE (TYPE_FIELDS (type)),
4310 expr),
4311 false);
4312
4313 tree t = convert (TREE_TYPE (TYPE_FIELDS (type)), expr);
4314
4315 /* If converting to the inner type has already created a CONSTRUCTOR with
4316 the right size, then reuse it instead of creating another one. This
4317 can happen for the padding type built to overalign local variables. */
4318 if (TREE_CODE (t) == VIEW_CONVERT_EXPR
4319 && TREE_CODE (TREE_OPERAND (t, 0)) == CONSTRUCTOR
4320 && TREE_CONSTANT (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0))))
4321 && tree_int_cst_equal (TYPE_SIZE (type),
4322 TYPE_SIZE (TREE_TYPE (TREE_OPERAND (t, 0)))))
4323 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (t, 0));
4324
4325 vec<constructor_elt, va_gc> *v;
4326 vec_alloc (v, 1);
4327 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), t);
4328 return gnat_build_constructor (type, v);
4329 }
4330
4331 /* If the input type has padding, remove it and convert to the output type.
4332 The conditions ordering is arranged to ensure that the output type is not
4333 a padding type here, as it is not clear whether the conversion would
4334 always be correct if this was to happen. */
4335 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4336 {
4337 tree unpadded;
4338
4339 /* If we have just converted to this padded type, just get the
4340 inner expression. */
4341 if (TREE_CODE (expr) == CONSTRUCTOR)
4342 unpadded = CONSTRUCTOR_ELT (expr, 0)->value;
4343
4344 /* Otherwise, build an explicit component reference. */
4345 else
4346 unpadded = build_component_ref (expr, TYPE_FIELDS (etype), false);
4347
4348 return convert (type, unpadded);
4349 }
4350
4351 /* If the input is a biased type, convert first to the base type and add
4352 the bias. Note that the bias must go through a full conversion to the
4353 base type, lest it is itself a biased value; this happens for subtypes
4354 of biased types. */
4355 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4356 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4357 fold_convert (TREE_TYPE (etype), expr),
4358 convert (TREE_TYPE (etype),
4359 TYPE_MIN_VALUE (etype))));
4360
4361 /* If the input is a justified modular type, we need to extract the actual
4362 object before converting it to any other type with the exceptions of an
4363 unconstrained array or of a mere type variant. It is useful to avoid the
4364 extraction and conversion in the type variant case because it could end
4365 up replacing a VAR_DECL expr by a constructor and we might be about the
4366 take the address of the result. */
4367 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4368 && code != UNCONSTRAINED_ARRAY_TYPE
4369 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4370 return
4371 convert (type, build_component_ref (expr, TYPE_FIELDS (etype), false));
4372
4373 /* If converting to a type that contains a template, convert to the data
4374 type and then build the template. */
4375 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4376 {
4377 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4378 vec<constructor_elt, va_gc> *v;
4379 vec_alloc (v, 2);
4380
4381 /* If the source already has a template, get a reference to the
4382 associated array only, as we are going to rebuild a template
4383 for the target type anyway. */
4384 expr = maybe_unconstrained_array (expr);
4385
4386 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4387 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4388 obj_type, NULL_TREE));
4389 if (expr)
4390 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4391 convert (obj_type, expr));
4392 return gnat_build_constructor (type, v);
4393 }
4394
4395 /* There are some cases of expressions that we process specially. */
4396 switch (TREE_CODE (expr))
4397 {
4398 case ERROR_MARK:
4399 return expr;
4400
4401 case NULL_EXPR:
4402 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4403 conversion in gnat_expand_expr. NULL_EXPR does not represent
4404          an actual value, so no conversion is needed.  */
4405 expr = copy_node (expr);
4406 TREE_TYPE (expr) = type;
4407 return expr;
4408
4409 case STRING_CST:
4410 /* If we are converting a STRING_CST to another constrained array type,
4411 just make a new one in the proper type. */
4412 if (code == ecode && AGGREGATE_TYPE_P (etype)
4413 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4414 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4415 {
4416 expr = copy_node (expr);
4417 TREE_TYPE (expr) = type;
4418 return expr;
4419 }
4420 break;
4421
4422 case VECTOR_CST:
4423 /* If we are converting a VECTOR_CST to a mere type variant, just make
4424 a new one in the proper type. */
4425 if (code == ecode && gnat_types_compatible_p (type, etype))
4426 {
4427 expr = copy_node (expr);
4428 TREE_TYPE (expr) = type;
4429 return expr;
4430 }
4431 break;
4432
4433 case CONSTRUCTOR:
4434 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4435 another padding type around the same type, just make a new one in
4436 the proper type. */
4437 if (code == ecode
4438 && (gnat_types_compatible_p (type, etype)
4439 || (code == RECORD_TYPE
4440 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4441 && TREE_TYPE (TYPE_FIELDS (type))
4442 == TREE_TYPE (TYPE_FIELDS (etype)))))
4443 {
4444 expr = copy_node (expr);
4445 TREE_TYPE (expr) = type;
4446 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4447 return expr;
4448 }
4449
4450 /* Likewise for a conversion between original and packable version, or
4451 conversion between types of the same size and with the same list of
4452 fields, but we have to work harder to preserve type consistency. */
4453 if (code == ecode
4454 && code == RECORD_TYPE
4455 && (TYPE_NAME (type) == TYPE_NAME (etype)
4456 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4457
4458 {
4459 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4460 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4461 vec<constructor_elt, va_gc> *v;
4462 vec_alloc (v, len);
4463 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4464 unsigned HOST_WIDE_INT idx;
4465 tree index, value;
4466
4467 /* Whether we need to clear TREE_CONSTANT et al. on the output
4468 constructor when we convert in place. */
4469 bool clear_constant = false;
4470
4471 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4472 {
4473 /* Skip the missing fields in the CONSTRUCTOR. */
4474 while (efield && field && !SAME_FIELD_P (efield, index))
4475 {
4476 efield = DECL_CHAIN (efield);
4477 field = DECL_CHAIN (field);
4478 }
4479 /* The field must be the same. */
4480 if (!(efield && field && SAME_FIELD_P (efield, field)))
4481 break;
4482 constructor_elt elt
4483 = {field, convert (TREE_TYPE (field), value)};
4484 v->quick_push (elt);
4485
4486 /* If packing has made this field a bitfield and the input
4487 value couldn't be emitted statically any more, we need to
4488 clear TREE_CONSTANT on our output. */
4489 if (!clear_constant
4490 && TREE_CONSTANT (expr)
4491 && !CONSTRUCTOR_BITFIELD_P (efield)
4492 && CONSTRUCTOR_BITFIELD_P (field)
4493 && !initializer_constant_valid_for_bitfield_p (value))
4494 clear_constant = true;
4495
4496 efield = DECL_CHAIN (efield);
4497 field = DECL_CHAIN (field);
4498 }
4499
4500 /* If we have been able to match and convert all the input fields
4501 to their output type, convert in place now. We'll fallback to a
4502 view conversion downstream otherwise. */
4503 if (idx == len)
4504 {
4505 expr = copy_node (expr);
4506 TREE_TYPE (expr) = type;
4507 CONSTRUCTOR_ELTS (expr) = v;
4508 if (clear_constant)
4509 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4510 return expr;
4511 }
4512 }
4513
4514 /* Likewise for a conversion between array type and vector type with a
4515 compatible representative array. */
4516 else if (code == VECTOR_TYPE
4517 && ecode == ARRAY_TYPE
4518 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4519 etype))
4520 {
4521 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4522 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4523 vec<constructor_elt, va_gc> *v;
4524 unsigned HOST_WIDE_INT ix;
4525 tree value;
4526
4527 /* Build a VECTOR_CST from a *constant* array constructor. */
4528 if (TREE_CONSTANT (expr))
4529 {
4530 bool constant_p = true;
4531
4532 /* Iterate through elements and check if all constructor
4533 elements are *_CSTs. */
4534 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4535 if (!CONSTANT_CLASS_P (value))
4536 {
4537 constant_p = false;
4538 break;
4539 }
4540
4541 if (constant_p)
4542 return build_vector_from_ctor (type,
4543 CONSTRUCTOR_ELTS (expr));
4544 }
4545
4546 /* Otherwise, build a regular vector constructor. */
4547 vec_alloc (v, len);
4548 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4549 {
4550 constructor_elt elt = {NULL_TREE, value};
4551 v->quick_push (elt);
4552 }
4553 expr = copy_node (expr);
4554 TREE_TYPE (expr) = type;
4555 CONSTRUCTOR_ELTS (expr) = v;
4556 return expr;
4557 }
4558 break;
4559
4560 case UNCONSTRAINED_ARRAY_REF:
4561 /* First retrieve the underlying array. */
4562 expr = maybe_unconstrained_array (expr);
4563 etype = TREE_TYPE (expr);
4564 ecode = TREE_CODE (etype);
4565 break;
4566
4567 case VIEW_CONVERT_EXPR:
4568 {
4569 /* GCC 4.x is very sensitive to type consistency overall, and view
4570 conversions thus are very frequent. Even though just "convert"ing
4571 the inner operand to the output type is fine in most cases, it
4572 might expose unexpected input/output type mismatches in special
4573 circumstances so we avoid such recursive calls when we can. */
4574 tree op0 = TREE_OPERAND (expr, 0);
4575
4576 /* If we are converting back to the original type, we can just
4577 lift the input conversion. This is a common occurrence with
4578 switches back-and-forth amongst type variants. */
4579 if (type == TREE_TYPE (op0))
4580 return op0;
4581
4582 /* Otherwise, if we're converting between two aggregate or vector
4583 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4584 target type in place or to just convert the inner expression. */
4585 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4586 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4587 {
4588 /* If we are converting between mere variants, we can just
4589 substitute the VIEW_CONVERT_EXPR in place. */
4590 if (gnat_types_compatible_p (type, etype))
4591 return build1 (VIEW_CONVERT_EXPR, type, op0);
4592
4593 /* Otherwise, we may just bypass the input view conversion unless
4594 one of the types is a fat pointer, which is handled by
4595 specialized code below which relies on exact type matching. */
4596 else if (!TYPE_IS_FAT_POINTER_P (type)
4597 && !TYPE_IS_FAT_POINTER_P (etype))
4598 return convert (type, op0);
4599 }
4600
4601 break;
4602 }
4603
4604 default:
4605 break;
4606 }
4607
4608 /* Check for converting to a pointer to an unconstrained array. */
4609 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4610 return convert_to_fat_pointer (type, expr);
4611
4612 /* If we are converting between two aggregate or vector types that are mere
4613 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4614 to a vector type from its representative array type. */
4615 else if ((code == ecode
4616 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4617 && gnat_types_compatible_p (type, etype))
4618 || (code == VECTOR_TYPE
4619 && ecode == ARRAY_TYPE
4620 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4621 etype)))
4622 return build1 (VIEW_CONVERT_EXPR, type, expr);
4623
4624 /* If we are converting between tagged types, try to upcast properly. */
4625 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4626 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4627 {
4628 tree child_etype = etype;
4629 do {
4630 tree field = TYPE_FIELDS (child_etype);
4631 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4632 return build_component_ref (expr, field, false);
4633 child_etype = TREE_TYPE (field);
4634 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4635 }
4636
4637 /* If we are converting from a smaller form of record type back to it, just
4638 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4639 size on both sides. */
4640 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4641 && smaller_form_type_p (etype, type))
4642 {
4643 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4644 false, false, false, true),
4645 expr);
4646 return build1 (VIEW_CONVERT_EXPR, type, expr);
4647 }
4648
4649 /* In all other cases of related types, make a NOP_EXPR. */
4650 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4651 return fold_convert (type, expr);
4652
4653 switch (code)
4654 {
4655 case VOID_TYPE:
4656 return fold_build1 (CONVERT_EXPR, type, expr);
4657
4658 case INTEGER_TYPE:
4659 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4660 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4661 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4662 return unchecked_convert (type, expr, false);
4663
4664 /* If the output is a biased type, convert first to the base type and
4665 subtract the bias. Note that the bias itself must go through a full
4666 conversion to the base type, lest it is a biased value; this happens
4667 for subtypes of biased types. */
4668 if (TYPE_BIASED_REPRESENTATION_P (type))
4669 return fold_convert (type,
4670 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4671 convert (TREE_TYPE (type), expr),
4672 convert (TREE_TYPE (type),
4673 TYPE_MIN_VALUE (type))));
4674
4675 /* ... fall through ... */
4676
4677 case ENUMERAL_TYPE:
4678 case BOOLEAN_TYPE:
4679 /* If we are converting an additive expression to an integer type
4680 with lower precision, be wary of the optimization that can be
4681 applied by convert_to_integer. There are 2 problematic cases:
4682 - if the first operand was originally of a biased type,
4683 because we could be recursively called to convert it
4684 to an intermediate type and thus rematerialize the
4685 additive operator endlessly,
4686 - if the expression contains a placeholder, because an
4687 intermediate conversion that changes the sign could
4688 be inserted and thus introduce an artificial overflow
4689 at compile time when the placeholder is substituted. */
4690 if (code == INTEGER_TYPE
4691 && ecode == INTEGER_TYPE
4692 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4693 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4694 {
4695 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4696
4697 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4698 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4699 || CONTAINS_PLACEHOLDER_P (expr))
4700 return build1 (NOP_EXPR, type, expr);
4701 }
4702
4703 return fold (convert_to_integer (type, expr));
4704
4705 case POINTER_TYPE:
4706 case REFERENCE_TYPE:
4707 /* If converting between two thin pointers, adjust if needed to account
4708 for differing offsets from the base pointer, depending on whether
4709 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4710 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4711 {
4712 tree etype_pos
4713 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype))
4714 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4715 : size_zero_node;
4716 tree type_pos
4717 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))
4718 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4719 : size_zero_node;
4720 tree byte_diff = size_diffop (type_pos, etype_pos);
4721
4722 expr = build1 (NOP_EXPR, type, expr);
4723 if (integer_zerop (byte_diff))
4724 return expr;
4725
4726 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4727 fold_convert (sizetype, byte_diff));
4728 }
4729
4730 /* If converting fat pointer to normal or thin pointer, get the pointer
4731 to the array and then convert it. */
4732 if (TYPE_IS_FAT_POINTER_P (etype))
4733 expr = build_component_ref (expr, TYPE_FIELDS (etype), false);
4734
4735 return fold (convert_to_pointer (type, expr));
4736
4737 case REAL_TYPE:
4738 return fold (convert_to_real (type, expr));
4739
4740 case RECORD_TYPE:
4741 /* Do a normal conversion between scalar and justified modular type. */
4742 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4743 {
4744 vec<constructor_elt, va_gc> *v;
4745 vec_alloc (v, 1);
4746
4747 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4748 convert (TREE_TYPE (TYPE_FIELDS (type)),
4749 expr));
4750 return gnat_build_constructor (type, v);
4751 }
4752
4753 /* In these cases, assume the front-end has validated the conversion.
4754 If the conversion is valid, it will be a bit-wise conversion, so
4755 it can be viewed as an unchecked conversion. */
4756 return unchecked_convert (type, expr, false);
4757
4758 case ARRAY_TYPE:
4759 /* Do a normal conversion between unconstrained and constrained array
4760 type, assuming the latter is a constrained version of the former. */
4761 if (TREE_CODE (expr) == INDIRECT_REF
4762 && ecode == ARRAY_TYPE
4763 && TREE_TYPE (etype) == TREE_TYPE (type))
4764 {
4765 tree ptr_type = build_pointer_type (type);
4766 tree t = build_unary_op (INDIRECT_REF, NULL_TREE,
4767 fold_convert (ptr_type,
4768 TREE_OPERAND (expr, 0)));
4769 TREE_READONLY (t) = TREE_READONLY (expr);
4770 TREE_THIS_NOTRAP (t) = TREE_THIS_NOTRAP (expr);
4771 return t;
4772 }
4773
4774 /* In these cases, assume the front-end has validated the conversion.
4775 If the conversion is valid, it will be a bit-wise conversion, so
4776 it can be viewed as an unchecked conversion. */
4777 return unchecked_convert (type, expr, false);
4778
4779 case UNION_TYPE:
4780       /* This is either a conversion between a tagged type and some
4781 subtype, which we have to mark as a UNION_TYPE because of
4782 overlapping fields or a conversion of an Unchecked_Union. */
4783 return unchecked_convert (type, expr, false);
4784
4785 case UNCONSTRAINED_ARRAY_TYPE:
4786 /* If the input is a VECTOR_TYPE, convert to the representative
4787 array type first. */
4788 if (ecode == VECTOR_TYPE)
4789 {
4790 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4791 etype = TREE_TYPE (expr);
4792 ecode = TREE_CODE (etype);
4793 }
4794
4795 /* If EXPR is a constrained array, take its address, convert it to a
4796 fat pointer, and then dereference it. Likewise if EXPR is a
4797 record containing both a template and a constrained array.
4798 Note that a record representing a justified modular type
4799 always represents a packed constrained array. */
4800 if (ecode == ARRAY_TYPE
4801 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4802 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4803 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4804 return
4805 build_unary_op
4806 (INDIRECT_REF, NULL_TREE,
4807 convert_to_fat_pointer (TREE_TYPE (type),
4808 build_unary_op (ADDR_EXPR,
4809 NULL_TREE, expr)));
4810
4811 /* Do something very similar for converting one unconstrained
4812 array to another. */
4813 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4814 return
4815 build_unary_op (INDIRECT_REF, NULL_TREE,
4816 convert (TREE_TYPE (type),
4817 build_unary_op (ADDR_EXPR,
4818 NULL_TREE, expr)));
4819 else
4820 gcc_unreachable ();
4821
4822 case COMPLEX_TYPE:
4823 return fold (convert_to_complex (type, expr));
4824
4825 default:
4826 gcc_unreachable ();
4827 }
4828 }
4829
4830 /* Create an expression whose value is that of EXPR converted to the common
4831 index type, which is sizetype. EXPR is supposed to be in the base type
4832 of the GNAT index type. Calling it is equivalent to doing
4833
4834 convert (sizetype, expr)
4835
4836 but we try to distribute the type conversion with the knowledge that EXPR
4837 cannot overflow in its type. This is a best-effort approach and we fall
4838 back to the above expression as soon as difficulties are encountered.
4839
4840 This is necessary to overcome issues that arise when the GNAT base index
4841 type and the GCC common index type (sizetype) don't have the same size,
4842 which is quite frequent on 64-bit architectures. In this case, and if
4843 the GNAT base index type is signed but the iteration type of the loop has
4844 been forced to unsigned, the loop scalar evolution engine cannot compute
4845 a simple evolution for the general induction variables associated with the
4846 array indices, because it will preserve the wrap-around semantics in the
4847 unsigned type of their "inner" part. As a result, many loop optimizations
4848 are blocked.
4849
4850 The solution is to use a special (basic) induction variable that is at
4851 least as large as sizetype, and to express the aforementioned general
4852 induction variables in terms of this induction variable, eliminating
4853 the problematic intermediate truncation to the GNAT base index type.
4854 This is possible as long as the original expression doesn't overflow
4855 and if the middle-end hasn't introduced artificial overflows in the
4856 course of the various simplification it can make to the expression. */
4857
tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  /* Otherwise, distribute the conversion into EXPR, rebuilding the
     arithmetic directly in sizetype; bail out to a plain conversion for
     anything not handled below.  */
  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Rebuild the (now innocuous) conversion on top of the recursively
	 converted operand.  Note that CODE here is the original tree code
	 of EXPR, reached either directly or via the fallthrough above.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion into both operands of the additive or
	 multiplicative operator.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the second operand carries the value; the first is evaluated
	 purely for its side effects and is kept untouched.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms; the condition itself keeps its boolean type.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fallback: a regular conversion of the (possibly replaced) EXPR.  */
  return convert (sizetype, expr);
}
4915 \f
4916 /* Remove all conversions that are done in EXP. This includes converting
4917 from a padded type or to a justified modular type. If TRUE_ADDRESS
4918 is true, always return the address of the containing object even if
4919 the address is not bit-aligned. */
4920
4921 tree
4922 remove_conversions (tree exp, bool true_address)
4923 {
4924 switch (TREE_CODE (exp))
4925 {
4926 case CONSTRUCTOR:
4927 if (true_address
4928 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4929 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4930 return
4931 remove_conversions (CONSTRUCTOR_ELT (exp, 0)->value, true);
4932 break;
4933
4934 case COMPONENT_REF:
4935 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4936 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4937 break;
4938
4939 CASE_CONVERT:
4940 case VIEW_CONVERT_EXPR:
4941 case NON_LVALUE_EXPR:
4942 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4943
4944 default:
4945 break;
4946 }
4947
4948 return exp;
4949 }
4950 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  May
   return NULL_TREE for a template-containing CONSTRUCTOR whose array part
   is a box (see below).  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Save the flags of the reference so they can be restored on the
	     rebuilt INDIRECT_REF below.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Fetch the fat (or thin) pointer operand underneath the
	     unconstrained array reference.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Distribute the dereference into both arms of the COND_EXPR
		 so that each arm independently fetches its array through
		 the first field of the pointer record.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Dereference the pointer-to-array field of the record and
		 carry over the flags saved above.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* For a NULL_EXPR, just retype it to the underlying array type,
	 keeping the original operand.  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  code = TREE_CODE (exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* If the array initializer is a box, return NULL_TREE.  A
	     template-containing CONSTRUCTOR with fewer than 2 elements has
	     no array part.  */
	  if (code == CONSTRUCTOR && CONSTRUCTOR_NELTS (exp) < 2)
	    return NULL_TREE;

	  /* The array is the second field, right after the template.  */
	  exp = build_component_ref (exp, DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
5040 \f
5041 /* Return true if EXPR is an expression that can be folded as an operand
5042 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5043
5044 static bool
5045 can_fold_for_view_convert_p (tree expr)
5046 {
5047 tree t1, t2;
5048
5049 /* The folder will fold NOP_EXPRs between integral types with the same
5050 precision (in the middle-end's sense). We cannot allow it if the
5051 types don't have the same precision in the Ada sense as well. */
5052 if (TREE_CODE (expr) != NOP_EXPR)
5053 return true;
5054
5055 t1 = TREE_TYPE (expr);
5056 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5057
5058 /* Defer to the folder for non-integral conversions. */
5059 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5060 return true;
5061
5062 /* Only fold conversions that preserve both precisions. */
5063 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5064 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5065 return true;
5066
5067 return false;
5068 }
5069
5070 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5071 If NOTRUNC_P is true, truncation operations should be suppressed.
5072
5073 Special care is required with (source or target) integral types whose
5074 precision is not equal to their size, to make sure we fetch or assign
5075 the value bits whose location might depend on the endianness, e.g.
5076
5077 Rmsize : constant := 8;
5078 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5079
5080 type Bit_Array is array (1 .. Rmsize) of Boolean;
5081 pragma Pack (Bit_Array);
5082
5083 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5084
5085 Value : Int := 2#1000_0001#;
5086 Vbits : Bit_Array := To_Bit_Array (Value);
5087
5088 we expect the 8 bits at Vbits'Address to always contain Value, while
5089 their original location depends on the endianness, at Value'Address
5090 on a little-endian architecture but not on a big-endian one.
5091
5092 One pitfall is that we cannot use TYPE_UNSIGNED directly to decide how
5093 the bits between the precision and the size are filled, because of the
5094 trick used in the E_Signed_Integer_Subtype case of gnat_to_gnu_entity.
5095 So we use the special predicate type_unsigned_for_rm above. */
5096
5097 tree
5098 unchecked_convert (tree type, tree expr, bool notrunc_p)
5099 {
5100 tree etype = TREE_TYPE (expr);
5101 enum tree_code ecode = TREE_CODE (etype);
5102 enum tree_code code = TREE_CODE (type);
5103 tree tem;
5104 int c;
5105
5106 /* If the expression is already of the right type, we are done. */
5107 if (etype == type)
5108 return expr;
5109
5110 /* If both types are integral just do a normal conversion.
5111 Likewise for a conversion to an unconstrained array. */
5112 if (((INTEGRAL_TYPE_P (type)
5113 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5114 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5115 && (INTEGRAL_TYPE_P (etype)
5116 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5117 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5118 || code == UNCONSTRAINED_ARRAY_TYPE)
5119 {
5120 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
5121 {
5122 tree ntype = copy_type (etype);
5123 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5124 TYPE_MAIN_VARIANT (ntype) = ntype;
5125 expr = build1 (NOP_EXPR, ntype, expr);
5126 }
5127
5128 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5129 {
5130 tree rtype = copy_type (type);
5131 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5132 TYPE_MAIN_VARIANT (rtype) = rtype;
5133 expr = convert (rtype, expr);
5134 expr = build1 (NOP_EXPR, type, expr);
5135 }
5136 else
5137 expr = convert (type, expr);
5138 }
5139
5140 /* If we are converting to an integral type whose precision is not equal
5141 to its size, first unchecked convert to a record type that contains a
5142 field of the given precision. Then extract the result from the field.
5143
5144 There is a subtlety if the source type is an aggregate type with reverse
5145 storage order because its representation is not contiguous in the native
5146 storage order, i.e. a direct unchecked conversion to an integral type
5147 with N bits of precision cannot read the first N bits of the aggregate
5148 type. To overcome it, we do an unchecked conversion to an integral type
5149 with reverse storage order and return the resulting value. This also
5150 ensures that the result of the unchecked conversion doesn't depend on
5151 the endianness of the target machine, but only on the storage order of
5152 the aggregate type.
5153
5154 Finally, for the sake of consistency, we do the unchecked conversion
5155 to an integral type with reverse storage order as soon as the source
5156 type is an aggregate type with reverse storage order, even if there
5157 are no considerations of precision or size involved. */
5158 else if (INTEGRAL_TYPE_P (type)
5159 && TYPE_RM_SIZE (type)
5160 && (tree_int_cst_compare (TYPE_RM_SIZE (type),
5161 TYPE_SIZE (type)) < 0
5162 || (AGGREGATE_TYPE_P (etype)
5163 && TYPE_REVERSE_STORAGE_ORDER (etype))))
5164 {
5165 tree rec_type = make_node (RECORD_TYPE);
5166 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5167 tree field_type, field;
5168
5169 if (AGGREGATE_TYPE_P (etype))
5170 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5171 = TYPE_REVERSE_STORAGE_ORDER (etype);
5172
5173 if (type_unsigned_for_rm (type))
5174 field_type = make_unsigned_type (prec);
5175 else
5176 field_type = make_signed_type (prec);
5177 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5178
5179 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5180 NULL_TREE, bitsize_zero_node, 1, 0);
5181
5182 finish_record_type (rec_type, field, 1, false);
5183
5184 expr = unchecked_convert (rec_type, expr, notrunc_p);
5185 expr = build_component_ref (expr, field, false);
5186 expr = fold_build1 (NOP_EXPR, type, expr);
5187 }
5188
5189 /* Similarly if we are converting from an integral type whose precision is
5190 not equal to its size, first copy into a field of the given precision
5191 and unchecked convert the record type.
5192
5193 The same considerations as above apply if the target type is an aggregate
5194 type with reverse storage order and we also proceed similarly. */
5195 else if (INTEGRAL_TYPE_P (etype)
5196 && TYPE_RM_SIZE (etype)
5197 && (tree_int_cst_compare (TYPE_RM_SIZE (etype),
5198 TYPE_SIZE (etype)) < 0
5199 || (AGGREGATE_TYPE_P (type)
5200 && TYPE_REVERSE_STORAGE_ORDER (type))))
5201 {
5202 tree rec_type = make_node (RECORD_TYPE);
5203 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5204 vec<constructor_elt, va_gc> *v;
5205 vec_alloc (v, 1);
5206 tree field_type, field;
5207
5208 if (AGGREGATE_TYPE_P (type))
5209 TYPE_REVERSE_STORAGE_ORDER (rec_type)
5210 = TYPE_REVERSE_STORAGE_ORDER (type);
5211
5212 if (type_unsigned_for_rm (etype))
5213 field_type = make_unsigned_type (prec);
5214 else
5215 field_type = make_signed_type (prec);
5216 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5217
5218 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5219 NULL_TREE, bitsize_zero_node, 1, 0);
5220
5221 finish_record_type (rec_type, field, 1, false);
5222
5223 expr = fold_build1 (NOP_EXPR, field_type, expr);
5224 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5225 expr = gnat_build_constructor (rec_type, v);
5226 expr = unchecked_convert (type, expr, notrunc_p);
5227 }
5228
5229 /* If we are converting from a scalar type to a type with a different size,
5230 we need to pad to have the same size on both sides.
5231
5232 ??? We cannot do it unconditionally because unchecked conversions are
5233 used liberally by the front-end to implement polymorphism, e.g. in:
5234
5235 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5236 return p___size__4 (p__object!(S191s.all));
5237
5238 so we skip all expressions that are references. */
5239 else if (!REFERENCE_CLASS_P (expr)
5240 && !AGGREGATE_TYPE_P (etype)
5241 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5242 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5243 {
5244 if (c < 0)
5245 {
5246 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5247 false, false, false, true),
5248 expr);
5249 expr = unchecked_convert (type, expr, notrunc_p);
5250 }
5251 else
5252 {
5253 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5254 false, false, false, true);
5255 expr = unchecked_convert (rec_type, expr, notrunc_p);
5256 expr = build_component_ref (expr, TYPE_FIELDS (rec_type), false);
5257 }
5258 }
5259
5260 /* We have a special case when we are converting between two unconstrained
5261 array types. In that case, take the address, convert the fat pointer
5262 types, and dereference. */
5263 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5264 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5265 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5266 build_unary_op (ADDR_EXPR, NULL_TREE,
5267 expr)));
5268
5269 /* Another special case is when we are converting to a vector type from its
5270 representative array type; this a regular conversion. */
5271 else if (code == VECTOR_TYPE
5272 && ecode == ARRAY_TYPE
5273 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5274 etype))
5275 expr = convert (type, expr);
5276
5277 /* And, if the array type is not the representative, we try to build an
5278 intermediate vector type of which the array type is the representative
5279 and to do the unchecked conversion between the vector types, in order
5280 to enable further simplifications in the middle-end. */
5281 else if (code == VECTOR_TYPE
5282 && ecode == ARRAY_TYPE
5283 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
5284 {
5285 expr = convert (tem, expr);
5286 return unchecked_convert (type, expr, notrunc_p);
5287 }
5288
5289 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5290 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5291 else if (TREE_CODE (expr) == CONSTRUCTOR
5292 && code == RECORD_TYPE
5293 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
5294 {
5295 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
5296 Empty, false, false, false, true),
5297 expr);
5298 return unchecked_convert (type, expr, notrunc_p);
5299 }
5300
5301 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5302 else
5303 {
5304 expr = maybe_unconstrained_array (expr);
5305 etype = TREE_TYPE (expr);
5306 ecode = TREE_CODE (etype);
5307 if (can_fold_for_view_convert_p (expr))
5308 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5309 else
5310 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5311 }
5312
5313 /* If the result is a non-biased integral type whose precision is not equal
5314 to its size, sign- or zero-extend the result. But we need not do this
5315 if the input is also an integral type and both are unsigned or both are
5316 signed and have the same precision. */
5317 if (!notrunc_p
5318 && INTEGRAL_TYPE_P (type)
5319 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5320 && TYPE_RM_SIZE (type)
5321 && tree_int_cst_compare (TYPE_RM_SIZE (type), TYPE_SIZE (type)) < 0
5322 && !(INTEGRAL_TYPE_P (etype)
5323 && type_unsigned_for_rm (type) == type_unsigned_for_rm (etype)
5324 && (type_unsigned_for_rm (type)
5325 || tree_int_cst_compare (TYPE_RM_SIZE (type),
5326 TYPE_RM_SIZE (etype)
5327 ? TYPE_RM_SIZE (etype)
5328 : TYPE_SIZE (etype)) == 0)))
5329 {
5330 if (integer_zerop (TYPE_RM_SIZE (type)))
5331 expr = build_int_cst (type, 0);
5332 else
5333 {
5334 tree base_type
5335 = gnat_type_for_size (TREE_INT_CST_LOW (TYPE_SIZE (type)),
5336 type_unsigned_for_rm (type));
5337 tree shift_expr
5338 = convert (base_type,
5339 size_binop (MINUS_EXPR,
5340 TYPE_SIZE (type), TYPE_RM_SIZE (type)));
5341 expr
5342 = convert (type,
5343 build_binary_op (RSHIFT_EXPR, base_type,
5344 build_binary_op (LSHIFT_EXPR, base_type,
5345 convert (base_type,
5346 expr),
5347 shift_expr),
5348 shift_expr));
5349 }
5350 }
5351
5352 /* An unchecked conversion should never raise Constraint_Error. The code
5353 below assumes that GCC's conversion routines overflow the same way that
5354 the underlying hardware does. This is probably true. In the rare case
5355 when it is false, we can rely on the fact that such conversions are
5356 erroneous anyway. */
5357 if (TREE_CODE (expr) == INTEGER_CST)
5358 TREE_OVERFLOW (expr) = 0;
5359
5360 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5361 show no longer constant. */
5362 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5363 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5364 OEP_ONLY_CONST))
5365 TREE_CONSTANT (expr) = 0;
5366
5367 return expr;
5368 }
5369 \f
5370 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5371 the latter being a record type as predicated by Is_Record_Type. */
5372
5373 enum tree_code
5374 tree_code_for_record_type (Entity_Id gnat_type)
5375 {
5376 Node_Id component_list, component;
5377
5378 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5379 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5380 if (!Is_Unchecked_Union (gnat_type))
5381 return RECORD_TYPE;
5382
5383 gnat_type = Implementation_Base_Type (gnat_type);
5384 component_list
5385 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5386
5387 for (component = First_Non_Pragma (Component_Items (component_list));
5388 Present (component);
5389 component = Next_Non_Pragma (component))
5390 if (Ekind (Defining_Entity (component)) == E_Component)
5391 return RECORD_TYPE;
5392
5393 return UNION_TYPE;
5394 }
5395
5396 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5397 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5398 according to the presence of an alignment clause on the type or, if it
5399 is an array, on the component type. */
5400
5401 bool
5402 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5403 {
5404 gnat_type = Underlying_Type (gnat_type);
5405
5406 *align_clause = Present (Alignment_Clause (gnat_type));
5407
5408 if (Is_Array_Type (gnat_type))
5409 {
5410 gnat_type = Underlying_Type (Component_Type (gnat_type));
5411 if (Present (Alignment_Clause (gnat_type)))
5412 *align_clause = true;
5413 }
5414
5415 if (!Is_Floating_Point_Type (gnat_type))
5416 return false;
5417
5418 if (UI_To_Int (Esize (gnat_type)) != 64)
5419 return false;
5420
5421 return true;
5422 }
5423
5424 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5425 size is greater or equal to 64 bits, or an array of such a type. Set
5426 ALIGN_CLAUSE according to the presence of an alignment clause on the
5427 type or, if it is an array, on the component type. */
5428
5429 bool
5430 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5431 {
5432 gnat_type = Underlying_Type (gnat_type);
5433
5434 *align_clause = Present (Alignment_Clause (gnat_type));
5435
5436 if (Is_Array_Type (gnat_type))
5437 {
5438 gnat_type = Underlying_Type (Component_Type (gnat_type));
5439 if (Present (Alignment_Clause (gnat_type)))
5440 *align_clause = true;
5441 }
5442
5443 if (!Is_Scalar_Type (gnat_type))
5444 return false;
5445
5446 if (UI_To_Int (Esize (gnat_type)) < 64)
5447 return false;
5448
5449 return true;
5450 }
5451
5452 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5453 component of an aggregate type. */
5454
5455 bool
5456 type_for_nonaliased_component_p (tree gnu_type)
5457 {
5458 /* If the type is passed by reference, we may have pointers to the
5459 component so it cannot be made non-aliased. */
5460 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5461 return false;
5462
5463 /* We used to say that any component of aggregate type is aliased
5464 because the front-end may take 'Reference of it. The front-end
5465 has been enhanced in the meantime so as to use a renaming instead
5466 in most cases, but the back-end can probably take the address of
5467 such a component too so we go for the conservative stance.
5468
5469 For instance, we might need the address of any array type, even
5470 if normally passed by copy, to construct a fat pointer if the
5471 component is used as an actual for an unconstrained formal.
5472
5473 Likewise for record types: even if a specific record subtype is
5474 passed by copy, the parent type might be passed by ref (e.g. if
5475 it's of variable size) and we might take the address of a child
5476 component to pass to a parent formal. We have no way to check
5477 for such conditions here. */
5478 if (AGGREGATE_TYPE_P (gnu_type))
5479 return false;
5480
5481 return true;
5482 }
5483
5484 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5485
5486 bool
5487 smaller_form_type_p (tree type, tree orig_type)
5488 {
5489 tree size, osize;
5490
5491 /* We're not interested in variants here. */
5492 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5493 return false;
5494
5495 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5496 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5497 return false;
5498
5499 size = TYPE_SIZE (type);
5500 osize = TYPE_SIZE (orig_type);
5501
5502 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5503 return false;
5504
5505 return tree_int_cst_lt (size, osize) != 0;
5506 }
5507
5508 /* Return whether EXPR, which is the renamed object in an object renaming
5509 declaration, can be materialized as a reference (with a REFERENCE_TYPE).
5510 This should be synchronized with Exp_Dbug.Debug_Renaming_Declaration. */
5511
5512 bool
5513 can_materialize_object_renaming_p (Node_Id expr)
5514 {
5515 while (true)
5516 {
5517 expr = Original_Node (expr);
5518
5519 switch Nkind (expr)
5520 {
5521 case N_Identifier:
5522 case N_Expanded_Name:
5523 if (!Present (Renamed_Object (Entity (expr))))
5524 return true;
5525 expr = Renamed_Object (Entity (expr));
5526 break;
5527
5528 case N_Selected_Component:
5529 {
5530 if (Is_Packed (Underlying_Type (Etype (Prefix (expr)))))
5531 return false;
5532
5533 const Uint bitpos
5534 = Normalized_First_Bit (Entity (Selector_Name (expr)));
5535 if (!UI_Is_In_Int_Range (bitpos)
5536 || (bitpos != UI_No_Uint && bitpos != UI_From_Int (0)))
5537 return false;
5538
5539 expr = Prefix (expr);
5540 break;
5541 }
5542
5543 case N_Indexed_Component:
5544 case N_Slice:
5545 {
5546 const Entity_Id t = Underlying_Type (Etype (Prefix (expr)));
5547
5548 if (Is_Array_Type (t) && Present (Packed_Array_Impl_Type (t)))
5549 return false;
5550
5551 expr = Prefix (expr);
5552 break;
5553 }
5554
5555 case N_Explicit_Dereference:
5556 expr = Prefix (expr);
5557 break;
5558
5559 default:
5560 return true;
5561 };
5562 }
5563 }
5564
5565 /* Perform final processing on global declarations. */
5566
/* Dummy variable used by gnat_write_global_declarations below to register
   types used at the global level; built unconditionally when a global
   object exists, to avoid -fcompare-debug issues.  */
static GTY (()) tree dummy_global;
5568
void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose, but
     we need to build it unconditionally to avoid -fcompare-debug issues.  */
  if (first_global_object_name)
    {
      struct varpool_node *node;
      char *label;

      /* Derive a private name from the first global object's name.  */
      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      /* NOTE(review): DECL_HARD_REGISTER presumably keeps the dummy from
	 being materialized in the assembly output while force_output keeps
	 its node (and the attached types) alive — confirm intent.  */
      DECL_HARD_REGISTER (dummy_global) = 1;
      TREE_STATIC (dummy_global) = 1;
      node = varpool_node::get_create (dummy_global);
      node->definition = 1;
      node->force_output = 1;

      /* Drain the queue of used types and attach each to the dummy.  */
      if (types_used_by_cur_var_decl)
	while (!types_used_by_cur_var_decl->is_empty ())
	  {
	    tree t = types_used_by_cur_var_decl->pop ();
	    types_used_by_var_decl_insert (t, dummy_global);
	  }
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->type_decl (iter, false);

  /* Output imported functions.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == FUNCTION_DECL
	&& DECL_EXTERNAL (iter)
	&& DECL_INITIAL (iter) == NULL
	&& !DECL_IGNORED_P (iter)
	&& DECL_FUNCTION_IS_DEF (iter))
      debug_hooks->early_global_decl (iter);

  /* Then output the global variables.  We need to do that after the debug
     information for global types is emitted so that they are finalized.  Skip
     external global variables, unless we need to emit debug info for them:
     this is useful for imported variables, for instance.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == VAR_DECL
	&& (!DECL_EXTERNAL (iter) || !DECL_IGNORED_P (iter)))
      rest_of_decl_compilation (iter, true, 0);

  /* Output the imported modules/declarations.  In GNAT, these only
     materialize subprograms.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == IMPORTED_DECL && !DECL_IGNORED_P (iter))
      debug_hooks->imported_module_or_decl (iter, DECL_NAME (iter),
					    DECL_CONTEXT (iter), false, false);
}
5634
5635 /* ************************************************************************
5636 * * GCC builtins support *
5637 * ************************************************************************ */
5638
5639 /* The general scheme is fairly simple:
5640
5641 For each builtin function/type to be declared, gnat_install_builtins calls
5642 internal facilities which eventually get to gnat_pushdecl, which in turn
5643 tracks the so declared builtin function decls in the 'builtin_decls' global
5644 datastructure. When an Intrinsic subprogram declaration is processed, we
5645 search this global datastructure to retrieve the associated BUILT_IN DECL
5646 node. */
5647
5648 /* Search the chain of currently available builtin declarations for a node
5649 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5650 found, if any, or NULL_TREE otherwise. */
5651 tree
5652 builtin_decl_for (tree name)
5653 {
5654 unsigned i;
5655 tree decl;
5656
5657 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5658 if (DECL_NAME (decl) == name)
5659 return decl;
5660
5661 return NULL_TREE;
5662 }
5663
5664 /* The code below eventually exposes gnat_install_builtins, which declares
5665 the builtin types and functions we might need, either internally or as
5666 user accessible facilities.
5667
5668 ??? This is a first implementation shot, still in rough shape. It is
5669 heavily inspired from the "C" family implementation, with chunks copied
5670 verbatim from there.
5671
5672 Two obvious improvement candidates are:
5673 o Use a more efficient name/decl mapping scheme
5674 o Devise a middle-end infrastructure to avoid having to copy
5675 pieces between front-ends. */
5676
5677 /* ----------------------------------------------------------------------- *
5678 * BUILTIN ELEMENTARY TYPES *
5679 * ----------------------------------------------------------------------- */
5680
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
  CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
  CTI_STRING_TYPE,
  CTI_CONST_STRING_TYPE,

  CTI_MAX
};

/* Trees for the indices above, filled in by
   install_builtin_elementary_types.  */
static tree c_global_trees[CTI_MAX];

#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  */
#define wint_type_node void_type_node
#define intmax_type_node void_type_node
#define uintmax_type_node void_type_node
5704
/* Used to help initialize the builtin-types.def table.  When a type of
   the correct size doesn't exist, use error_mark_node instead of NULL.
   The latter results in segfaults even when a decl using the type doesn't
   get invoked.  */
5709
5710 static tree
5711 builtin_type_for_size (int size, bool unsignedp)
5712 {
5713 tree type = gnat_type_for_size (size, unsignedp);
5714 return type ? type : error_mark_node;
5715 }
5716
5717 /* Build/push the elementary type decls that builtin functions/types
5718 will need. */
5719
5720 static void
5721 install_builtin_elementary_types (void)
5722 {
5723 signed_size_type_node = gnat_signed_type_for (size_type_node);
5724 pid_type_node = integer_type_node;
5725
5726 string_type_node = build_pointer_type (char_type_node);
5727 const_string_type_node
5728 = build_pointer_type (build_qualified_type
5729 (char_type_node, TYPE_QUAL_CONST));
5730 }
5731
5732 /* ----------------------------------------------------------------------- *
5733 * BUILTIN FUNCTION TYPES *
5734 * ----------------------------------------------------------------------- */
5735
/* Now, builtin function types per se.  Each entry of builtin-types.def
   expands to an enumerator naming the corresponding function type; the
   matching trees are built later by install_builtin_function_types.  */

enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) NAME,
#define DEF_FUNCTION_TYPE_9(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9) NAME,
#define DEF_FUNCTION_TYPE_10(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10) NAME,
#define DEF_FUNCTION_TYPE_11(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
				NAME,
#define DEF_FUNCTION_TYPE_VAR_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6) NAME,
#define DEF_FUNCTION_TYPE_VAR_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  BT_LAST
};

/* Shorthand for the enum above.  */
typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5801
5802 /* A helper function for install_builtin_types. Build function type
5803 for DEF with return type RET and N arguments. If VAR is true, then the
5804 function should be variadic after those N arguments.
5805
5806 Takes special care not to ICE if any of the types involved are
5807 error_mark_node, which indicates that said type is not in fact available
5808 (see builtin_type_for_size). In which case the function type as a whole
5809 should be error_mark_node. */
5810
5811 static void
5812 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5813 {
5814 tree t;
5815 tree *args = XALLOCAVEC (tree, n);
5816 va_list list;
5817 int i;
5818
5819 va_start (list, n);
5820 for (i = 0; i < n; ++i)
5821 {
5822 builtin_type a = (builtin_type) va_arg (list, int);
5823 t = builtin_types[a];
5824 if (t == error_mark_node)
5825 goto egress;
5826 args[i] = t;
5827 }
5828
5829 t = builtin_types[ret];
5830 if (t == error_mark_node)
5831 goto egress;
5832 if (var)
5833 t = build_varargs_function_type_array (t, n, args);
5834 else
5835 t = build_function_type_array (t, n, args);
5836
5837 egress:
5838 builtin_types[def] = t;
5839 va_end (list);
5840 }
5841
5842 /* Build the builtin function types and install them in the builtin_types
5843 array for later use in builtin function decls. */
5844
static void
install_builtin_function_types (void)
{
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  /* An array va_list decays to a pointer to its element type when passed
     as an argument; otherwise pass it by reference.  */
  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
	build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

  /* Expand each entry of builtin-types.def into a call that builds the
     corresponding tree into the builtin_types array (see def_fn_type).  */
#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6) \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8) \
  def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8);
#define DEF_FUNCTION_TYPE_9(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
			    ARG6, ARG7, ARG8, ARG9) \
  def_fn_type (ENUM, RETURN, 0, 9, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9);
#define DEF_FUNCTION_TYPE_10(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10) \
  def_fn_type (ENUM, RETURN, 0, 10, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10);
#define DEF_FUNCTION_TYPE_11(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5,\
			     ARG6, ARG7, ARG8, ARG9, ARG10, ARG11) \
  def_fn_type (ENUM, RETURN, 0, 11, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
	       ARG7, ARG8, ARG9, ARG10, ARG11);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_VAR_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6) \
  def_fn_type (ENUM, RETURN, 1, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_VAR_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
				ARG6, ARG7) \
  def_fn_type (ENUM, RETURN, 1, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_9
#undef DEF_FUNCTION_TYPE_10
#undef DEF_FUNCTION_TYPE_11
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_FUNCTION_TYPE_VAR_6
#undef DEF_FUNCTION_TYPE_VAR_7
#undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}
5945
5946 /* ----------------------------------------------------------------------- *
5947 * BUILTIN ATTRIBUTES *
5948 * ----------------------------------------------------------------------- */
5949
/* Enumerators for each attribute tree defined in builtin-attrs.def; the
   matching trees are built by install_builtin_attributes below.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Trees for the enumerators above, indexed by built_in_attribute.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5967
/* Build the trees for the attributes defined in builtin-attrs.def and
   store them into the built_in_attributes array.  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5992
5993 /* Handle a "const" attribute; arguments as in
5994 struct attribute_spec.handler. */
5995
5996 static tree
5997 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5998 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5999 bool *no_add_attrs)
6000 {
6001 if (TREE_CODE (*node) == FUNCTION_DECL)
6002 TREE_READONLY (*node) = 1;
6003 else
6004 *no_add_attrs = true;
6005
6006 return NULL_TREE;
6007 }
6008
6009 /* Handle a "nothrow" attribute; arguments as in
6010 struct attribute_spec.handler. */
6011
6012 static tree
6013 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6014 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6015 bool *no_add_attrs)
6016 {
6017 if (TREE_CODE (*node) == FUNCTION_DECL)
6018 TREE_NOTHROW (*node) = 1;
6019 else
6020 *no_add_attrs = true;
6021
6022 return NULL_TREE;
6023 }
6024
6025 /* Handle a "pure" attribute; arguments as in
6026 struct attribute_spec.handler. */
6027
6028 static tree
6029 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6030 int ARG_UNUSED (flags), bool *no_add_attrs)
6031 {
6032 if (TREE_CODE (*node) == FUNCTION_DECL)
6033 DECL_PURE_P (*node) = 1;
6034 /* TODO: support types. */
6035 else
6036 {
6037 warning (OPT_Wattributes, "%qs attribute ignored",
6038 IDENTIFIER_POINTER (name));
6039 *no_add_attrs = true;
6040 }
6041
6042 return NULL_TREE;
6043 }
6044
6045 /* Handle a "no vops" attribute; arguments as in
6046 struct attribute_spec.handler. */
6047
static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* This attribute is only ever put on function declarations internally,
     so anything else is a front-end bug.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
6057
6058 /* Helper for nonnull attribute handling; fetch the operand number
6059 from the attribute argument list. */
6060
6061 static bool
6062 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6063 {
6064 /* Verify the arg number is a constant. */
6065 if (!tree_fits_uhwi_p (arg_num_expr))
6066 return false;
6067
6068 *valp = TREE_INT_CST_LOW (arg_num_expr);
6069 return true;
6070 }
6071
6072 /* Handle the "nonnull" attribute. */
static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
			  tree args, int ARG_UNUSED (flags),
			  bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.
     Avoid diagnosing type-generic built-ins since those have no
     prototype.  */
  if (!args)
    {
      if (!prototype_p (type)
	  && (!TYPE_ATTRIBUTES (type)
	      || !lookup_attribute ("type generic", TYPE_ATTRIBUTES (type))))
	{
	  error ("nonnull attribute without arguments on a non-prototype");
	  *no_add_attrs = true;
	}
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      /* Each operand must be an integer constant argument number.  */
      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
	{
	  error ("nonnull argument has invalid operand number (argument %lu)",
		 (unsigned long) attr_arg_num);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}

      if (prototype_p (type))
	{
	  function_args_iterator iter;
	  tree argument;

	  /* Walk the prototype's argument types until the arg_num-th one
	     (1-based) or the end of the list.  */
	  function_args_iter_init (&iter, type);
	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
	    {
	      argument = function_args_iter_cond (&iter);
	      if (!argument || ck_num == arg_num)
		break;
	    }

	  /* Running off the end, or hitting the terminating void, means
	     the operand number is out of range.  */
	  if (!argument
	      || TREE_CODE (argument) == VOID_TYPE)
	    {
	      error ("nonnull argument with out-of-range operand number "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }

	  /* nonnull only makes sense on pointer arguments.  */
	  if (TREE_CODE (argument) != POINTER_TYPE)
	    {
	      error ("nonnull argument references non-pointer operand "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }
	}
    }

  return NULL_TREE;
}
6148
6149 /* Handle a "sentinel" attribute. */
6150
6151 static tree
6152 handle_sentinel_attribute (tree *node, tree name, tree args,
6153 int ARG_UNUSED (flags), bool *no_add_attrs)
6154 {
6155 if (!prototype_p (*node))
6156 {
6157 warning (OPT_Wattributes,
6158 "%qs attribute requires prototypes with named arguments",
6159 IDENTIFIER_POINTER (name));
6160 *no_add_attrs = true;
6161 }
6162 else
6163 {
6164 if (!stdarg_p (*node))
6165 {
6166 warning (OPT_Wattributes,
6167 "%qs attribute only applies to variadic functions",
6168 IDENTIFIER_POINTER (name));
6169 *no_add_attrs = true;
6170 }
6171 }
6172
6173 if (args)
6174 {
6175 tree position = TREE_VALUE (args);
6176
6177 if (TREE_CODE (position) != INTEGER_CST)
6178 {
6179 warning (0, "requested position is not an integer constant");
6180 *no_add_attrs = true;
6181 }
6182 else
6183 {
6184 if (tree_int_cst_lt (position, integer_zero_node))
6185 {
6186 warning (0, "requested position is less than zero");
6187 *no_add_attrs = true;
6188 }
6189 }
6190 }
6191
6192 return NULL_TREE;
6193 }
6194
6195 /* Handle a "noreturn" attribute; arguments as in
6196 struct attribute_spec.handler. */
6197
6198 static tree
6199 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6200 int ARG_UNUSED (flags), bool *no_add_attrs)
6201 {
6202 tree type = TREE_TYPE (*node);
6203
6204 /* See FIXME comment in c_common_attribute_table. */
6205 if (TREE_CODE (*node) == FUNCTION_DECL)
6206 TREE_THIS_VOLATILE (*node) = 1;
6207 else if (TREE_CODE (type) == POINTER_TYPE
6208 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6209 TREE_TYPE (*node)
6210 = build_pointer_type
6211 (build_type_variant (TREE_TYPE (type),
6212 TYPE_READONLY (TREE_TYPE (type)), 1));
6213 else
6214 {
6215 warning (OPT_Wattributes, "%qs attribute ignored",
6216 IDENTIFIER_POINTER (name));
6217 *no_add_attrs = true;
6218 }
6219
6220 return NULL_TREE;
6221 }
6222
6223 /* Handle a "noinline" attribute; arguments as in
6224 struct attribute_spec.handler. */
6225
6226 static tree
6227 handle_noinline_attribute (tree *node, tree name,
6228 tree ARG_UNUSED (args),
6229 int ARG_UNUSED (flags), bool *no_add_attrs)
6230 {
6231 if (TREE_CODE (*node) == FUNCTION_DECL)
6232 {
6233 if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (*node)))
6234 {
6235 warning (OPT_Wattributes, "%qE attribute ignored due to conflict "
6236 "with attribute %qs", name, "always_inline");
6237 *no_add_attrs = true;
6238 }
6239 else
6240 DECL_UNINLINABLE (*node) = 1;
6241 }
6242 else
6243 {
6244 warning (OPT_Wattributes, "%qE attribute ignored", name);
6245 *no_add_attrs = true;
6246 }
6247
6248 return NULL_TREE;
6249 }
6250
6251 /* Handle a "noclone" attribute; arguments as in
6252 struct attribute_spec.handler. */
6253
6254 static tree
6255 handle_noclone_attribute (tree *node, tree name,
6256 tree ARG_UNUSED (args),
6257 int ARG_UNUSED (flags), bool *no_add_attrs)
6258 {
6259 if (TREE_CODE (*node) != FUNCTION_DECL)
6260 {
6261 warning (OPT_Wattributes, "%qE attribute ignored", name);
6262 *no_add_attrs = true;
6263 }
6264
6265 return NULL_TREE;
6266 }
6267
6268 /* Handle a "leaf" attribute; arguments as in
6269 struct attribute_spec.handler. */
6270
6271 static tree
6272 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6273 int ARG_UNUSED (flags), bool *no_add_attrs)
6274 {
6275 if (TREE_CODE (*node) != FUNCTION_DECL)
6276 {
6277 warning (OPT_Wattributes, "%qE attribute ignored", name);
6278 *no_add_attrs = true;
6279 }
6280 if (!TREE_PUBLIC (*node))
6281 {
6282 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6283 *no_add_attrs = true;
6284 }
6285
6286 return NULL_TREE;
6287 }
6288
6289 /* Handle a "always_inline" attribute; arguments as in
6290 struct attribute_spec.handler. */
6291
6292 static tree
6293 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6294 int ARG_UNUSED (flags), bool *no_add_attrs)
6295 {
6296 if (TREE_CODE (*node) == FUNCTION_DECL)
6297 {
6298 /* Set the attribute and mark it for disregarding inline limits. */
6299 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
6300 }
6301 else
6302 {
6303 warning (OPT_Wattributes, "%qE attribute ignored", name);
6304 *no_add_attrs = true;
6305 }
6306
6307 return NULL_TREE;
6308 }
6309
6310 /* Handle a "malloc" attribute; arguments as in
6311 struct attribute_spec.handler. */
6312
6313 static tree
6314 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6315 int ARG_UNUSED (flags), bool *no_add_attrs)
6316 {
6317 if (TREE_CODE (*node) == FUNCTION_DECL
6318 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6319 DECL_IS_MALLOC (*node) = 1;
6320 else
6321 {
6322 warning (OPT_Wattributes, "%qs attribute ignored",
6323 IDENTIFIER_POINTER (name));
6324 *no_add_attrs = true;
6325 }
6326
6327 return NULL_TREE;
6328 }
6329
/* Fake handler for attributes we don't properly support.  It performs no
   checking at all and leaves *no_add_attrs untouched, so the attribute is
   passed through unchanged by the common attribute machinery.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
6341
/* Handle a "type_generic" attribute.  The attribute is always accepted
   (*no_add_attrs is never set); the asserts below merely sanity-check
   that it is only ever applied to a variadic function type.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
			       tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			       bool * ARG_UNUSED (no_add_attrs))
{
  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  /* Ensure we have a variadic function (an unprototyped type is accepted
     too, since it places no constraint on the arguments).  */
  gcc_assert (!prototype_p (*node) || stdarg_p (*node));

  return NULL_TREE;
}
6357
6358 /* Handle a "vector_size" attribute; arguments as in
6359 struct attribute_spec.handler. */
6360
6361 static tree
6362 handle_vector_size_attribute (tree *node, tree name, tree args,
6363 int ARG_UNUSED (flags), bool *no_add_attrs)
6364 {
6365 tree type = *node;
6366 tree vector_type;
6367
6368 *no_add_attrs = true;
6369
6370 /* We need to provide for vector pointers, vector arrays, and
6371 functions returning vectors. For example:
6372
6373 __attribute__((vector_size(16))) short *foo;
6374
6375 In this case, the mode is SI, but the type being modified is
6376 HI, so we need to look further. */
6377 while (POINTER_TYPE_P (type)
6378 || TREE_CODE (type) == FUNCTION_TYPE
6379 || TREE_CODE (type) == ARRAY_TYPE)
6380 type = TREE_TYPE (type);
6381
6382 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6383 if (!vector_type)
6384 return NULL_TREE;
6385
6386 /* Build back pointers if needed. */
6387 *node = reconstruct_complex_type (*node, vector_type);
6388
6389 return NULL_TREE;
6390 }
6391
6392 /* Handle a "vector_type" attribute; arguments as in
6393 struct attribute_spec.handler. */
6394
6395 static tree
6396 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6397 int ARG_UNUSED (flags), bool *no_add_attrs)
6398 {
6399 tree type = *node;
6400 tree vector_type;
6401
6402 *no_add_attrs = true;
6403
6404 if (TREE_CODE (type) != ARRAY_TYPE)
6405 {
6406 error ("attribute %qs applies to array types only",
6407 IDENTIFIER_POINTER (name));
6408 return NULL_TREE;
6409 }
6410
6411 vector_type = build_vector_type_for_array (type, name);
6412 if (!vector_type)
6413 return NULL_TREE;
6414
6415 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6416 *node = vector_type;
6417
6418 return NULL_TREE;
6419 }
6420
6421 /* ----------------------------------------------------------------------- *
6422 * BUILTIN FUNCTIONS *
6423 * ----------------------------------------------------------------------- */
6424
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode))
    return;

  /* NAME must start with "__builtin_" whenever a library-visible alias may
     be derived from it below (both_p or fallback_p set).  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  /* LIBNAME is NAME with the "__builtin_" prefix stripped; it is only used
     when fallback_p or both_p is set, in which case the assert above has
     validated the prefix.  */
  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Record DECL as the decl for FNCODE so builtin_decl_explicit finds it
     (and bails out early) on any later attempt to redefine it.  */
  set_builtin_decl (fncode, decl, implicit_p);
}
6462
/* C-dialect flags referenced by the conditions in builtins.def (included
   just below).  The Ada compiler has no -std= selection, so they are pinned
   to 0: builtins guarded by them are simply never installed.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
6466
/* Install what the common builtins.def offers.  Each DEF_BUILTIN entry is
   expanded into a guarded call to def_builtin_1, looking up the type and
   attribute nodes in the builtin_types and built_in_attributes tables
   (presumably filled in by the install_builtin_* routines called earlier
   from gnat_install_builtins -- their definitions are elsewhere).  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
}
6482
6483 /* ----------------------------------------------------------------------- *
6484 * BUILTIN FUNCTIONS *
6485 * ----------------------------------------------------------------------- */
6486
/* Install the builtin functions we might need.  Note that the call order
   below matters: the type and attribute tables must be populated before
   install_builtin_functions consumes them, and the middle-end/target
   builtins are registered first so that builtins.def entries for already
   installed decls are ignored (see def_builtin_1).  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6507
6508 #include "gt-ada-utils.h"
6509 #include "gtype-ada.h"