c-decl.c (build_compound_literal): Use TYPE_READONLY.
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41
42 #include "ada.h"
43 #include "types.h"
44 #include "atree.h"
45 #include "elists.h"
46 #include "namet.h"
47 #include "nlists.h"
48 #include "stringt.h"
49 #include "uintp.h"
50 #include "fe.h"
51 #include "sinfo.h"
52 #include "einfo.h"
53 #include "ada-tree.h"
54 #include "gigi.h"
55
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  Indexed by GNAT entity number minus
   First_Node_Id; sized/garbage-collected via the max_gnat_nodes length.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

struct e_stack GTY(()) {
  /* Enclosing (outer) stack entry.  */
  struct e_stack *next;
  /* Saved elaboration list of the suspended context.  */
  tree elab_list;
};
static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

	the global one
	one for each subprogram definition
	one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level GTY(())
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;
  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;
  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;
  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};

/* The binding level currently in effect.  */
static GTY(()) struct binding_level *current_binding_level;

/* A chain of binding_level structures awaiting reuse (recycled by
   poplevel, consumed by pushlevel).  */
static GTY((deletable (""))) struct binding_level *free_binding_level;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};

/* Per-function language-specific data; GNAT currently keeps none.  */
struct language_function GTY(())
{
  int unused;
};

/* Forward declarations for local subprograms.  */
static tree merge_sizes (tree, tree, tree, int, int);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static int value_zerop (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *,tree, tree, tree);
static int value_factor_p (tree, int);
static int potential_alignment_gap (tree, tree, tree);
159 \f
160 /* Initialize the association of GNAT nodes to GCC trees. */
161
162 void
163 init_gnat_to_gnu (void)
164 {
165 associate_gnat_to_gnu
166 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
167
168 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
169 }
170
171 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
172 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
173 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
174
175 If GNU_DECL is zero, a previous association is to be reset. */
176
177 void
178 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
179 {
180 /* Check that GNAT_ENTITY is not already defined and that it is being set
181 to something which is a decl. Raise gigi 401 if not. Usually, this
182 means GNAT_ENTITY is defined twice, but occasionally is due to some
183 Gigi problem. */
184 if (gnu_decl
185 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
186 || (! no_check && ! DECL_P (gnu_decl))))
187 gigi_abort (401);
188
189 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
190 }
191
192 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
193 Return the ..._DECL node that was associated with it. If there is no tree
194 node associated with GNAT_ENTITY, abort.
195
196 In some cases, such as delayed elaboration or expressions that need to
197 be elaborated only once, GNAT_ENTITY is really not an entity. */
198
199 tree
200 get_gnu_tree (Entity_Id gnat_entity)
201 {
202 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
203 gigi_abort (402);
204
205 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
206 }
207
208 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
209
210 int
211 present_gnu_tree (Entity_Id gnat_entity)
212 {
213 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
214 }
215
216 \f
217 /* Return non-zero if we are currently in the global binding level. */
218
219 int
220 global_bindings_p (void)
221 {
222 return (force_global != 0 || current_binding_level == global_binding_level
223 ? -1 : 0);
224 }
225
226 /* Return the list of declarations in the current level. Note that this list
227 is in reverse order (it has to be so for back-end compatibility). */
228
229 tree
230 getdecls (void)
231 {
232 return current_binding_level->names;
233 }
234
235 /* Nonzero if the current level needs to have a BLOCK made. */
236
237 int
238 kept_level_p (void)
239 {
240 return (current_binding_level->names != 0);
241 }
242
243 /* Enter a new binding level. The input parameter is ignored, but has to be
244 specified for back-end compatibility. */
245
246 void
247 pushlevel (int ignore ATTRIBUTE_UNUSED)
248 {
249 struct binding_level *newlevel = NULL;
250
251 /* Reuse a struct for this binding level, if there is one. */
252 if (free_binding_level)
253 {
254 newlevel = free_binding_level;
255 free_binding_level = free_binding_level->level_chain;
256 }
257 else
258 newlevel
259 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
260
261 *newlevel = clear_binding_level;
262
263 /* Add this level to the front of the chain (stack) of levels that are
264 active. */
265 newlevel->level_chain = current_binding_level;
266 current_binding_level = newlevel;
267 }
268
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so
   and create a "block" (a BLOCK node) for the level
   to record its declarations and subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.

   Returns the BLOCK node for the level (possibly NULL_TREE).  */

tree
poplevel (int keep, int reverse, int functionbody)
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node construted for the
     binding level that we are about to exit and which is returned by this
     routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL node, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
	&& ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
	&& DECL_INITIAL (decl_node) != 0)
      {
	/* Save/restore the current function state around the nested
	   emission.  */
	push_function_context ();
	output_inline_function (decl_node);
	pop_function_context ();
      }

  /* Use the preallocated BLOCK if there is one; otherwise make one only
     when KEEP or FUNCTIONBODY requires it.  */
  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock its enclosing scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
	 don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
	{
	  if (TREE_USED (subblock_node))
	    TREE_USED (DECL_NAME (subblock_node)) = 1;
	  if (TREE_ADDRESSABLE (subblock_node))
	    TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
	}

  {
    /* Pop the current level, and free the structure for reuse (it goes on
       the free_binding_level chain consumed by pushlevel).  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
	 in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
	 leave them in the BLOCK because they are found in the FUNCTION_DECL
	 instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      /* Record the block as a subblock of the (now current) enclosing
	 level, unless it was preallocated and hence already recorded.  */
      if (!block_previously_created)
	current_binding_level->blocks
	  = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
385 \f
386 /* Insert BLOCK at the end of the list of subblocks of the
387 current binding level. This is used when a BIND_EXPR is expanded,
388 to handle the BLOCK node inside the BIND_EXPR. */
389
390 void
391 insert_block (tree block)
392 {
393 TREE_USED (block) = 1;
394 current_binding_level->blocks
395 = chainon (current_binding_level->blocks, block);
396 }
397
398 /* Set the BLOCK node for the innermost scope
399 (the one we are currently in). */
400
401 void
402 set_block (tree block)
403 {
404 current_binding_level->this_block = block;
405 current_binding_level->names = chainon (current_binding_level->names,
406 BLOCK_VARS (block));
407 current_binding_level->blocks = chainon (current_binding_level->blocks,
408 BLOCK_SUBBLOCKS (block));
409 }
410
411 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
412 Returns the ..._DECL node. */
413
414 tree
415 pushdecl (tree decl)
416 {
417 struct binding_level *b;
418
419 /* If at top level, there is no context. But PARM_DECLs always go in the
420 level of its function. */
421 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
422 {
423 b = global_binding_level;
424 DECL_CONTEXT (decl) = 0;
425 }
426 else
427 {
428 b = current_binding_level;
429 DECL_CONTEXT (decl) = current_function_decl;
430 }
431
432 /* Put the declaration on the list. The list of declarations is in reverse
433 order. The list will be reversed later if necessary. This needs to be
434 this way for compatibility with the back-end.
435
436 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
437 will cause trouble with the debugger and aren't needed anyway. */
438 if (TREE_CODE (decl) != TYPE_DECL
439 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
440 {
441 TREE_CHAIN (decl) = b->names;
442 b->names = decl;
443 }
444
445 /* For the declaration of a type, set its name if it either is not already
446 set, was set to an IDENTIFIER_NODE, indicating an internal name,
447 or if the previous type name was not derived from a source name.
448 We'd rather have the type named with a real name and all the pointer
449 types to the same object have the same POINTER_TYPE node. Code in this
450 function in c-decl.c makes a copy of the type node here, but that may
451 cause us trouble with incomplete types, so let's not try it (at least
452 for now). */
453
454 if (TREE_CODE (decl) == TYPE_DECL
455 && DECL_NAME (decl) != 0
456 && (TYPE_NAME (TREE_TYPE (decl)) == 0
457 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
458 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
459 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
460 && ! DECL_ARTIFICIAL (decl))))
461 TYPE_NAME (TREE_TYPE (decl)) = decl;
462
463 return decl;
464 }
465 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  /* NOTE(review): the identifier says "unsigned char" but the node pushed
     is char_type_node — presumably deliberate for Ada's Character type;
     confirm before changing.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}
503
504 /* Create the predefined scalar types such as `integer_type_node' needed
505 in the gcc back-end and initialize the global binding level. */
506
507 void
508 init_gigi_decls (tree long_long_float_type, tree exception_type)
509 {
510 tree endlink, decl;
511 unsigned int i;
512
513 /* Set the types that GCC and Gigi use from the front end. We would like
514 to do this for char_type_node, but it needs to correspond to the C
515 char type. */
516 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
517 {
518 /* In this case, the builtin floating point types are VAX float,
519 so make up a type for use. */
520 longest_float_type_node = make_node (REAL_TYPE);
521 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
522 layout_type (longest_float_type_node);
523 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
524 longest_float_type_node));
525 }
526 else
527 longest_float_type_node = TREE_TYPE (long_long_float_type);
528
529 except_type_node = TREE_TYPE (exception_type);
530
531 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
532 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
533 unsigned_type_node));
534
535 void_type_decl_node
536 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
537 void_type_node));
538
539 void_ftype = build_function_type (void_type_node, NULL_TREE);
540 ptr_void_ftype = build_pointer_type (void_ftype);
541
542 /* Now declare runtime functions. */
543 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
544
545 /* malloc is a function declaration tree for a function to allocate
546 memory. */
547 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
548 NULL_TREE,
549 build_function_type (ptr_void_type_node,
550 tree_cons (NULL_TREE,
551 sizetype,
552 endlink)),
553 NULL_TREE, 0, 1, 1, 0);
554
555 /* free is a function declaration tree for a function to free memory. */
556 free_decl
557 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
558 build_function_type (void_type_node,
559 tree_cons (NULL_TREE,
560 ptr_void_type_node,
561 endlink)),
562 NULL_TREE, 0, 1, 1, 0);
563
564 /* Make the types and functions used for exception processing. */
565 jmpbuf_type
566 = build_array_type (gnat_type_for_mode (Pmode, 0),
567 build_index_type (build_int_2 (5, 0)));
568 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
569 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
570
571 /* Functions to get and set the jumpbuf pointer for the current thread. */
572 get_jmpbuf_decl
573 = create_subprog_decl
574 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
575 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
576 NULL_TREE, 0, 1, 1, 0);
577
578 set_jmpbuf_decl
579 = create_subprog_decl
580 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
581 NULL_TREE,
582 build_function_type (void_type_node,
583 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
584 NULL_TREE, 0, 1, 1, 0);
585
586 /* Function to get the current exception. */
587 get_excptr_decl
588 = create_subprog_decl
589 (get_identifier ("system__soft_links__get_gnat_exception"),
590 NULL_TREE,
591 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
592 NULL_TREE, 0, 1, 1, 0);
593
594 /* Functions that raise exceptions. */
595 raise_nodefer_decl
596 = create_subprog_decl
597 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
598 build_function_type (void_type_node,
599 tree_cons (NULL_TREE,
600 build_pointer_type (except_type_node),
601 endlink)),
602 NULL_TREE, 0, 1, 1, 0);
603
604 /* Hooks to call when entering/leaving an exception handler. */
605 begin_handler_decl
606 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
607 build_function_type (void_type_node,
608 tree_cons (NULL_TREE,
609 ptr_void_type_node,
610 endlink)),
611 NULL_TREE, 0, 1, 1, 0);
612
613 end_handler_decl
614 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
615 build_function_type (void_type_node,
616 tree_cons (NULL_TREE,
617 ptr_void_type_node,
618 endlink)),
619 NULL_TREE, 0, 1, 1, 0);
620
621 /* If in no exception handlers mode, all raise statements are redirected to
622 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
623 this procedure will never be called in this mode. */
624 if (No_Exception_Handlers_Set ())
625 {
626 decl
627 = create_subprog_decl
628 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
629 build_function_type (void_type_node,
630 tree_cons (NULL_TREE,
631 build_pointer_type (char_type_node),
632 tree_cons (NULL_TREE,
633 integer_type_node,
634 endlink))),
635 NULL_TREE, 0, 1, 1, 0);
636
637 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
638 gnat_raise_decls[i] = decl;
639 }
640 else
641 /* Otherwise, make one decl for each exception reason. */
642 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
643 {
644 char name[17];
645
646 sprintf (name, "__gnat_rcheck_%.2d", i);
647 gnat_raise_decls[i]
648 = create_subprog_decl
649 (get_identifier (name), NULL_TREE,
650 build_function_type (void_type_node,
651 tree_cons (NULL_TREE,
652 build_pointer_type
653 (char_type_node),
654 tree_cons (NULL_TREE,
655 integer_type_node,
656 endlink))),
657 NULL_TREE, 0, 1, 1, 0);
658 }
659
660 /* Indicate that these never return. */
661 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
662 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
663 TREE_TYPE (raise_nodefer_decl)
664 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
665 TYPE_QUAL_VOLATILE);
666
667 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
668 {
669 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
670 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
671 TREE_TYPE (gnat_raise_decls[i])
672 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
673 TYPE_QUAL_VOLATILE);
674 }
675
676 /* setjmp returns an integer and has one operand, which is a pointer to
677 a jmpbuf. */
678 setjmp_decl
679 = create_subprog_decl
680 (get_identifier ("__builtin_setjmp"), NULL_TREE,
681 build_function_type (integer_type_node,
682 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
683 NULL_TREE, 0, 1, 1, 0);
684
685 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
686 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
687
688 main_identifier_node = get_identifier ("main");
689 }
690 \f
691 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
692 nodes (FIELDLIST), finish constructing the record or union type.
693 If HAS_REP is nonzero, this record has a rep clause; don't call
694 layout_type but merely set the size and alignment ourselves.
695 If DEFER_DEBUG is nonzero, do not call the debugging routines
696 on this type; it will be done later. */
697
698 void
699 finish_record_type (tree record_type,
700 tree fieldlist,
701 int has_rep,
702 int defer_debug)
703 {
704 enum tree_code code = TREE_CODE (record_type);
705 tree ada_size = bitsize_zero_node;
706 tree size = bitsize_zero_node;
707 tree size_unit = size_zero_node;
708 int var_size = 0;
709 tree field;
710
711 TYPE_FIELDS (record_type) = fieldlist;
712
713 if (TYPE_NAME (record_type) != 0
714 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
715 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
716 else
717 TYPE_STUB_DECL (record_type)
718 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
719 record_type));
720
721 /* We don't need both the typedef name and the record name output in
722 the debugging information, since they are the same. */
723 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
724
725 /* Globally initialize the record first. If this is a rep'ed record,
726 that just means some initializations; otherwise, layout the record. */
727
728 if (has_rep)
729 {
730 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
731 TYPE_MODE (record_type) = BLKmode;
732 if (TYPE_SIZE (record_type) == 0)
733 {
734 TYPE_SIZE (record_type) = bitsize_zero_node;
735 TYPE_SIZE_UNIT (record_type) = size_zero_node;
736 }
737 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
738 out just like a UNION_TYPE, since the size will be fixed. */
739 else if (code == QUAL_UNION_TYPE)
740 code = UNION_TYPE;
741 }
742 else
743 {
744 /* Ensure there isn't a size already set. There can be in an error
745 case where there is a rep clause but all fields have errors and
746 no longer have a position. */
747 TYPE_SIZE (record_type) = 0;
748 layout_type (record_type);
749 }
750
751 /* At this point, the position and size of each field is known. It was
752 either set before entry by a rep clause, or by laying out the type above.
753
754 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
755 to compute the Ada size; the GCC size and alignment (for rep'ed records
756 that are not padding types); and the mode (for rep'ed records). We also
757 clear the DECL_BIT_FIELD indication for the cases we know have not been
758 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
759
760 if (code == QUAL_UNION_TYPE)
761 fieldlist = nreverse (fieldlist);
762
763 for (field = fieldlist; field; field = TREE_CHAIN (field))
764 {
765 tree pos = bit_position (field);
766
767 tree type = TREE_TYPE (field);
768 tree this_size = DECL_SIZE (field);
769 tree this_size_unit = DECL_SIZE_UNIT (field);
770 tree this_ada_size = DECL_SIZE (field);
771
772 /* We need to make an XVE/XVU record if any field has variable size,
773 whether or not the record does. For example, if we have an union,
774 it may be that all fields, rounded up to the alignment, have the
775 same size, in which case we'll use that size. But the debug
776 output routines (except Dwarf2) won't be able to output the fields,
777 so we need to make the special record. */
778 if (TREE_CODE (this_size) != INTEGER_CST)
779 var_size = 1;
780
781 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
782 || TREE_CODE (type) == QUAL_UNION_TYPE)
783 && ! TYPE_IS_FAT_POINTER_P (type)
784 && ! TYPE_CONTAINS_TEMPLATE_P (type)
785 && TYPE_ADA_SIZE (type) != 0)
786 this_ada_size = TYPE_ADA_SIZE (type);
787
788 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
789 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
790 && value_factor_p (pos, BITS_PER_UNIT)
791 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
792 DECL_BIT_FIELD (field) = 0;
793
794 /* If we still have DECL_BIT_FIELD set at this point, we know the field
795 is technically not addressable. Except that it can actually be
796 addressed if the field is BLKmode and happens to be properly
797 aligned. */
798 DECL_NONADDRESSABLE_P (field)
799 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
800
801 if (has_rep && ! DECL_BIT_FIELD (field))
802 TYPE_ALIGN (record_type)
803 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
804
805 switch (code)
806 {
807 case UNION_TYPE:
808 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
809 size = size_binop (MAX_EXPR, size, this_size);
810 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
811 break;
812
813 case QUAL_UNION_TYPE:
814 ada_size
815 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
816 this_ada_size, ada_size));
817 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
818 this_size, size));
819 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
820 this_size_unit, size_unit));
821 break;
822
823 case RECORD_TYPE:
824 /* Since we know here that all fields are sorted in order of
825 increasing bit position, the size of the record is one
826 higher than the ending bit of the last field processed
827 unless we have a rep clause, since in that case we might
828 have a field outside a QUAL_UNION_TYPE that has a higher ending
829 position. So use a MAX in that case. Also, if this field is a
830 QUAL_UNION_TYPE, we need to take into account the previous size in
831 the case of empty variants. */
832 ada_size
833 = merge_sizes (ada_size, pos, this_ada_size,
834 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
835 size = merge_sizes (size, pos, this_size,
836 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
837 size_unit
838 = merge_sizes (size_unit, byte_position (field), this_size_unit,
839 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
840 break;
841
842 default:
843 abort ();
844 }
845 }
846
847 if (code == QUAL_UNION_TYPE)
848 nreverse (fieldlist);
849
850 /* If this is a padding record, we never want to make the size smaller than
851 what was specified in it, if any. */
852 if (TREE_CODE (record_type) == RECORD_TYPE
853 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
854 {
855 size = TYPE_SIZE (record_type);
856 size_unit = TYPE_SIZE_UNIT (record_type);
857 }
858
859 /* Now set any of the values we've just computed that apply. */
860 if (! TYPE_IS_FAT_POINTER_P (record_type)
861 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
862 SET_TYPE_ADA_SIZE (record_type, ada_size);
863
864 if (has_rep)
865 {
866 if (! (TREE_CODE (record_type) == RECORD_TYPE
867 && TYPE_IS_PADDING_P (record_type)
868 && CONTAINS_PLACEHOLDER_P (size)))
869 {
870 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
871 TYPE_SIZE_UNIT (record_type)
872 = round_up (size_unit,
873 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
874 }
875
876 compute_record_mode (record_type);
877 }
878
879 if (! defer_debug)
880 {
881 /* If this record is of variable size, rename it so that the
882 debugger knows it is and make a new, parallel, record
883 that tells the debugger how the record is laid out. See
884 exp_dbug.ads. But don't do this for records that are padding
885 since they confuse GDB. */
886 if (var_size
887 && ! (TREE_CODE (record_type) == RECORD_TYPE
888 && TYPE_IS_PADDING_P (record_type)))
889 {
890 tree new_record_type
891 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
892 ? UNION_TYPE : TREE_CODE (record_type));
893 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
894 tree new_id
895 = concat_id_with_name (orig_id,
896 TREE_CODE (record_type) == QUAL_UNION_TYPE
897 ? "XVU" : "XVE");
898 tree last_pos = bitsize_zero_node;
899 tree old_field;
900 tree prev_old_field = 0;
901
902 TYPE_NAME (new_record_type) = new_id;
903 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
904 TYPE_STUB_DECL (new_record_type)
905 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
906 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
907 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
908 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
909 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
910
911 /* Now scan all the fields, replacing each field with a new
912 field corresponding to the new encoding. */
913 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
914 old_field = TREE_CHAIN (old_field))
915 {
916 tree field_type = TREE_TYPE (old_field);
917 tree field_name = DECL_NAME (old_field);
918 tree new_field;
919 tree curpos = bit_position (old_field);
920 int var = 0;
921 unsigned int align = 0;
922 tree pos;
923
924 /* See how the position was modified from the last position.
925
926 There are two basic cases we support: a value was added
927 to the last position or the last position was rounded to
928 a boundary and they something was added. Check for the
929 first case first. If not, see if there is any evidence
930 of rounding. If so, round the last position and try
931 again.
932
933 If this is a union, the position can be taken as zero. */
934
935 if (TREE_CODE (new_record_type) == UNION_TYPE)
936 pos = bitsize_zero_node, align = 0;
937 else
938 pos = compute_related_constant (curpos, last_pos);
939
940 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
941 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
942 {
943 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
944 pos = compute_related_constant (curpos,
945 round_up (last_pos, align));
946 }
947 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
948 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
949 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
950 && host_integerp (TREE_OPERAND
951 (TREE_OPERAND (curpos, 0), 1),
952 1))
953 {
954 align
955 = tree_low_cst
956 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
957 pos = compute_related_constant (curpos,
958 round_up (last_pos, align));
959 }
960 else if (potential_alignment_gap (prev_old_field, old_field,
961 pos))
962 {
963 align = TYPE_ALIGN (field_type);
964 pos = compute_related_constant (curpos,
965 round_up (last_pos, align));
966 }
967
968 /* If we can't compute a position, set it to zero.
969
970 ??? We really should abort here, but it's too much work
971 to get this correct for all cases. */
972
973 if (pos == 0)
974 pos = bitsize_zero_node;
975
976 /* See if this type is variable-size and make a new type
977 and indicate the indirection if so. */
978 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
979 {
980 field_type = build_pointer_type (field_type);
981 var = 1;
982 }
983
984 /* Make a new field name, if necessary. */
985 if (var || align != 0)
986 {
987 char suffix[6];
988
989 if (align != 0)
990 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
991 align / BITS_PER_UNIT);
992 else
993 strcpy (suffix, "XVL");
994
995 field_name = concat_id_with_name (field_name, suffix);
996 }
997
998 new_field = create_field_decl (field_name, field_type,
999 new_record_type, 0,
1000 DECL_SIZE (old_field), pos, 0);
1001 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1002 TYPE_FIELDS (new_record_type) = new_field;
1003
1004 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1005 zero. The only time it's not the last field of the record
1006 is when there are other components at fixed positions after
1007 it (meaning there was a rep clause for every field) and we
1008 want to be able to encode them. */
1009 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1010 (TREE_CODE (TREE_TYPE (old_field))
1011 == QUAL_UNION_TYPE)
1012 ? bitsize_zero_node
1013 : DECL_SIZE (old_field));
1014 prev_old_field = old_field;
1015 }
1016
1017 TYPE_FIELDS (new_record_type)
1018 = nreverse (TYPE_FIELDS (new_record_type));
1019
1020 rest_of_type_compilation (new_record_type, global_bindings_p ());
1021 }
1022
1023 rest_of_type_compilation (record_type, global_bindings_p ());
1024 }
1025 }
1026
1027 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1028 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1029 if this represents a QUAL_UNION_TYPE in which case we must look for
1030 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1031 is nonzero, we must take the MAX of the end position of this field
1032 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1033
1034 We return an expression for the size. */
1035
static tree
merge_sizes (tree last_size,
             tree first_bit,
             tree size,
             int special,
             int has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      /* Ordinary field: its end position is FIRST_BIT + SIZE.  When a rep
	 clause is present, fields may appear out of order, so keep the
	 running maximum of end positions instead.  */
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  else
    /* QUAL_UNION_TYPE variant with a conditional size: distribute the merge
       over both arms of the COND_EXPR.  An arm whose size is zero means
       that variant contributes nothing, so the previous size is kept.  */
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1072
1073 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1074 related by the addition of a constant. Return that constant if so. */
1075
1076 static tree
1077 compute_related_constant (tree op0, tree op1)
1078 {
1079 tree op0_var, op1_var;
1080 tree op0_con = split_plus (op0, &op0_var);
1081 tree op1_con = split_plus (op1, &op1_var);
1082 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1083
1084 if (operand_equal_p (op0_var, op1_var, 0))
1085 return result;
1086 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1087 return result;
1088 else
1089 return 0;
1090 }
1091
1092 /* Utility function of above to split a tree OP which may be a sum, into a
1093 constant part, which is returned, and a variable part, which is stored
1094 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1095 bitsizetype. */
1096
static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery. We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  /* Default: treat the whole expression as the variable part with a zero
     constant part; the cases below override this when they can do better.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* Pure constant: no variable part at all.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If neither operand yielded a constant part, keep *pvar as the
	 whole expression (set above) and report a zero constant.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      /* Recombine the two halves with the original PLUS_EXPR or
	 MINUS_EXPR so that signs are preserved on both parts.  */
      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
1129 \f
1130 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1131 subprogram. If it is void_type_node, then we are dealing with a procedure,
1132 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1133 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1134 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1135 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1136 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1137 RETURNS_WITH_DSP is nonzero if the function is to return with a
1138 depressed stack pointer. */
1139
1140 tree
1141 create_subprog_type (tree return_type,
1142 tree param_decl_list,
1143 tree cico_list,
1144 int returns_unconstrained,
1145 int returns_by_ref,
1146 int returns_with_dsp)
1147 {
1148 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1149 the subprogram formal parameters. This list is generated by traversing the
1150 input list of PARM_DECL nodes. */
1151 tree param_type_list = NULL;
1152 tree param_decl;
1153 tree type;
1154
1155 for (param_decl = param_decl_list; param_decl;
1156 param_decl = TREE_CHAIN (param_decl))
1157 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1158 param_type_list);
1159
1160 /* The list of the function parameter types has to be terminated by the void
1161 type to signal to the back-end that we are not dealing with a variable
1162 parameter subprogram, but that the subprogram has a fixed number of
1163 parameters. */
1164 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1165
1166 /* The list of argument types has been created in reverse
1167 so nreverse it. */
1168 param_type_list = nreverse (param_type_list);
1169
1170 type = build_function_type (return_type, param_type_list);
1171
1172 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1173 or the new type should, make a copy of TYPE. Likewise for
1174 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1175 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1176 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1177 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1178 type = copy_type (type);
1179
1180 SET_TYPE_CI_CO_LIST (type, cico_list);
1181 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1182 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1183 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1184 return type;
1185 }
1186 \f
1187 /* Return a copy of TYPE but safe to modify in any way. */
1188
1189 tree
1190 copy_type (tree type)
1191 {
1192 tree new = copy_node (type);
1193
1194 /* copy_node clears this field instead of copying it, because it is
1195 aliased with TREE_CHAIN. */
1196 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1197
1198 TYPE_POINTER_TO (new) = 0;
1199 TYPE_REFERENCE_TO (new) = 0;
1200 TYPE_MAIN_VARIANT (new) = new;
1201 TYPE_NEXT_VARIANT (new) = 0;
1202
1203 return new;
1204 }
1205 \f
1206 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1207 TYPE_INDEX_TYPE is INDEX. */
1208
1209 tree
1210 create_index_type (tree min, tree max, tree index)
1211 {
1212 /* First build a type for the desired range. */
1213 tree type = build_index_2_type (min, max);
1214
1215 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1216 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1217 is set, but not to INDEX, make a copy of this type with the requested
1218 index type. Note that we have no way of sharing these types, but that's
1219 only a small hole. */
1220 if (TYPE_INDEX_TYPE (type) == index)
1221 return type;
1222 else if (TYPE_INDEX_TYPE (type) != 0)
1223 type = copy_type (type);
1224
1225 SET_TYPE_INDEX_TYPE (type, index);
1226 return type;
1227 }
1228 \f
1229 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1230 string) and TYPE is a ..._TYPE node giving its data type.
1231 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1232 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1233 information about this type. */
1234
tree
create_type_decl (tree type_name,
                  tree type,
                  struct attrib *attr_list,
                  int artificial_p,
                  int debug_info_p)
{
  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  /* Mark compiler-generated declarations, enter the decl in the current
     binding level, and apply any machine attributes.  */
  DECL_ARTIFICIAL (type_decl) = artificial_p;
  pushdecl (type_decl);
  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
     a dummy type, which will be completed later, or a type for which
     debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
      || ! debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
	   && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
		 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);

  return type_decl;
}
1264
1265 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1266 ASM_NAME is its assembler name (if provided). TYPE is its data type
1267 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1268 expression; NULL_TREE if none.
1269
1270 CONST_FLAG is nonzero if this variable is constant.
1271
1272 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1273 the current compilation unit. This flag should be set when processing the
1274 variable definitions in a package specification. EXTERN_FLAG is nonzero
1275 when processing an external variable declaration (as opposed to a
1276 definition: no storage is to be allocated for the variable here).
1277
1278 STATIC_FLAG is only relevant when not at top level. In that case
1279 it indicates whether to always allocate storage to the variable. */
1280
tree
create_var_decl (tree var_name,
                 tree asm_name,
                 tree type,
                 tree var_init,
                 int const_flag,
                 int public_flag,
                 int extern_flag,
                 int static_flag,
                 struct attrib *attr_list)
{
  /* Nonzero if VAR_INIT is a constant suitable for the storage being
     allocated: static storage needs a value initializer_constant_valid_p
     accepts, automatic storage only needs TREE_CONSTANT.  The types must
     also agree, up to main variant.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small"  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* Initialization to be performed with an explicit assignment statement
     rather than through DECL_INITIAL, if needed (see below).  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* Global with a non-constant initializer: defer to the elaboration
     routine and clear the inline initializer.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  /* Type mismatch, or static storage without a valid constant: emit the
     initialization as an assignment statement after the decl instead.  */
  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  /* A volatile object is also flagged as having side effects, which keeps
     accesses to it from being optimized away (see gnat_mark_addressable
     call below).  */
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external. If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1386 \f
1387 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1388 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1389 this field is in a record type with a "pragma pack". If SIZE is nonzero
1390 it is the specified size for this field. If POS is nonzero, it is the bit
1391 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1392 the address of this field for aliasing purposes. */
1393
tree
create_field_decl (tree field_name,
                   tree field_type,
                   tree record_type,
                   int packed,
                   tree size,
                   tree pos,
                   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && ! addressable
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* Without packing or an explicit position, cap the field alignment
	 at the record's alignment when one is known.  */
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      /* Record that this field's position came from a rep clause.  */
      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = ! addressable;

  return field_decl;
}
1524
1525 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1526 effects, has the value of zero. */
1527
1528 static int
1529 value_zerop (tree exp)
1530 {
1531 if (TREE_CODE (exp) == COMPOUND_EXPR)
1532 return value_zerop (TREE_OPERAND (exp, 1));
1533
1534 return integer_zerop (exp);
1535 }
1536 \f
1537 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1538 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1539 readonly (either an IN parameter or an address of a pass-by-ref
1540 parameter). */
1541
tree
create_param_decl (tree param_name, tree param_type, int readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor targetm.calls.promote_prototypes(), as not doing so can
     lead to various ABI violations.  Integral types narrower than int
     are widened to int (or an equivalent biased subtype).  */
  if (targetm.calls.promote_prototypes (param_type)
      && (TREE_CODE (param_type) == INTEGER_TYPE
	  || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  param_type
	    = copy_type (build_range_type (integer_type_node,
					   TYPE_MIN_VALUE (param_type),
					   TYPE_MAX_VALUE (param_type)));

	  TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
	}
      else
	param_type = integer_type_node;
    }

  /* Record the possibly-promoted type in both DECL_ARG_TYPE fields.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
1575 \f
1576 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1577
void
process_attributes (tree decl, struct attrib *attr_list)
{
  /* Walk the whole chain; each entry is handled independently, so a
     failure on one attribute (reported via post_error) does not stop
     processing of the rest.  */
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand the attribute to the generic GCC machinery.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias needs storage, so force TREE_STATIC first.  */
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm.have_named_sections)
	  {
	    /* Placing a decl in a named section is incompatible with
	       common allocation, so clear DECL_COMMON.  */
	    DECL_SECTION_NAME (decl)
	      = build_string (IDENTIFIER_LENGTH (attr_list->name),
			      IDENTIFIER_POINTER (attr_list->name));
	    DECL_COMMON (decl) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr_list->error_point);
	break;
      }
}
1617 \f
1618 /* Add some pending elaborations on the list. */
1619
1620 void
1621 add_pending_elaborations (tree var_decl, tree var_init)
1622 {
1623 if (var_init != 0)
1624 Check_Elaboration_Code_Allowed (error_gnat_node);
1625
1626 pending_elaborations
1627 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1628 }
1629
1630 /* Obtain any pending elaborations and clear the old list. */
1631
1632 tree
1633 get_pending_elaborations (void)
1634 {
1635 /* Each thing added to the list went on the end; we want it on the
1636 beginning. */
1637 tree result = TREE_CHAIN (pending_elaborations);
1638
1639 TREE_CHAIN (pending_elaborations) = 0;
1640 return result;
1641 }
1642
1643 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1644 of 2. */
1645
1646 static int
1647 value_factor_p (tree value, int factor)
1648 {
1649 if (host_integerp (value, 1))
1650 return tree_low_cst (value, 1) % factor == 0;
1651
1652 if (TREE_CODE (value) == MULT_EXPR)
1653 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1654 || value_factor_p (TREE_OPERAND (value, 1), factor));
1655
1656 return 0;
1657 }
1658
1659 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1660 unless we can prove these 2 fields are laid out in such a way that no gap
1661 exist between the end of PREV_FIELD and the begining of CURR_FIELD. OFFSET
1662 is the distance in bits between the end of PREV_FIELD and the starting
1663 position of CURR_FIELD. It is ignored if null. */
1664
static int
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return 0;

  /* If the previous field is a union type, then return False: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return 0;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return (!integer_zerop (offset));

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there can not be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return 0;

  /* Fallback: we could not prove anything, so conservatively report
     that there may be a potential gap */
  return 1;
}
1704
1705 /* Return nonzero if there are pending elaborations. */
1706
1707 int
1708 pending_elaborations_p (void)
1709 {
1710 return TREE_CHAIN (pending_elaborations) != 0;
1711 }
1712
1713 /* Save a copy of the current pending elaboration list and make a new
1714 one. */
1715
1716 void
1717 push_pending_elaborations (void)
1718 {
1719 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1720
1721 p->next = elist_stack;
1722 p->elab_list = pending_elaborations;
1723 elist_stack = p;
1724 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1725 }
1726
1727 /* Pop the stack of pending elaborations. */
1728
1729 void
1730 pop_pending_elaborations (void)
1731 {
1732 struct e_stack *p = elist_stack;
1733
1734 pending_elaborations = p->elab_list;
1735 elist_stack = p->next;
1736 }
1737
1738 /* Return the current position in pending_elaborations so we can insert
1739 elaborations after that point. */
1740
1741 tree
1742 get_elaboration_location (void)
1743 {
1744 return tree_last (pending_elaborations);
1745 }
1746
1747 /* Insert the current elaborations after ELAB, which is in some elaboration
1748 list. */
1749
void
insert_elaboration_list (tree elab)
{
  /* Save ELAB's successor before any links are overwritten.  */
  tree next = TREE_CHAIN (elab);

  if (TREE_CHAIN (pending_elaborations))
    {
      /* Splice the current pending list (everything after its dummy head)
	 in between ELAB and its former successor, then leave our own
	 list empty.  The order of these three assignments matters.  */
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
}
1762
1763 /* Returns a LABEL_DECL node for LABEL_NAME. */
1764
1765 tree
1766 create_label_decl (tree label_name)
1767 {
1768 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1769
1770 DECL_CONTEXT (label_decl) = current_function_decl;
1771 DECL_MODE (label_decl) = VOIDmode;
1772 DECL_SOURCE_LOCATION (label_decl) = input_location;
1773
1774 return label_decl;
1775 }
1776 \f
1777 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1778 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1779 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1780 PARM_DECL nodes chained through the TREE_CHAIN field).
1781
1782 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1783 appropriate fields in the FUNCTION_DECL. */
1784
tree
create_subprog_decl (tree subprog_name,
                     tree asm_name,
                     tree subprog_type,
                     tree param_decl_list,
                     int inline_flag,
                     int public_flag,
                     int extern_flag,
                     struct attrib *attr_list)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = 1;

  /* Copy the caller-supplied flags and the properties of SUBPROG_TYPE
     onto the decl.  A volatile subprogram type also marks the decl as
     having side effects.  */
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  DECL_INLINE (subprog_decl) = inline_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  subprog_decl = pushdecl (subprog_decl);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
}
1827 \f
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  Incremented by begin_subprog_body and
   decremented by end_subprog_body.  */

static int function_nesting_depth;
1833
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  It enters a new binding level, pushes the
   PARM_DECLs onto it, and starts RTL generation for the function.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl_list;
  tree param_decl;
  tree next_param;

  /* Only save/restore backend function context when we are nested
     inside another function being compiled.  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
     subprogram body) so that they can be recognized as local variables in the
     subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecl'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      /* Save the successor first: pushdecl chains the node into the
	 binding level, so TREE_CHAIN must be cleared beforehand.  */
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes.  They appear in the right order.  */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
1896
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  Pops the binding level pushed by
   begin_subprog_body and discards per-function RTL afterwards.  */

void
end_subprog_body (void)
{
  tree decl;
  tree cico_list;

  /* Pop the binding level; this installs the function's BLOCK in
     DECL_INITIAL (replacing the error_mark_node placeholder).  */
  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end ();

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  /* If we're only annotating types, don't actually compile this
     function.  */
  if (!type_annotate_only)
    {
      rest_of_compilation (current_function_decl);
      if (! DECL_DEFER_OUTPUT (current_function_decl))
	{
	  free_after_compilation (cfun);
	  DECL_STRUCT_FUNCTION (current_function_decl) = 0;
	}
      cfun = 0;
    }

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway. */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  if (DECL_STRUCT_FUNCTION (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory. */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value. */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it. */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1975 \f
1976 /* Return a definition for a builtin function named NAME and whose data type
1977 is TYPE. TYPE should be a function type with argument types.
1978 FUNCTION_CODE tells later passes how to compile calls to this function.
1979 See tree.h for its possible values.
1980
1981 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1982 the name to be called if we can't opencode the function. If
1983 ATTRS is nonzero, use that for the function attribute list. */
1984
1985 tree
1986 builtin_function (const char *name,
1987 tree type,
1988 int function_code,
1989 enum built_in_class class,
1990 const char *library_name,
1991 tree attrs)
1992 {
1993 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1994
1995 DECL_EXTERNAL (decl) = 1;
1996 TREE_PUBLIC (decl) = 1;
1997 if (library_name)
1998 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1999
2000 pushdecl (decl);
2001 DECL_BUILT_IN_CLASS (decl) = class;
2002 DECL_FUNCTION_CODE (decl) = function_code;
2003 if (attrs)
2004 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
2005 return decl;
2006 }
2007
2008 /* Return an integer type with the number of bits of precision given by
2009 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2010 it is a signed type. */
2011
2012 tree
2013 gnat_type_for_size (unsigned precision, int unsignedp)
2014 {
2015 tree t;
2016 char type_name[20];
2017
2018 if (precision <= 2 * MAX_BITS_PER_WORD
2019 && signed_and_unsigned_types[precision][unsignedp] != 0)
2020 return signed_and_unsigned_types[precision][unsignedp];
2021
2022 if (unsignedp)
2023 t = make_unsigned_type (precision);
2024 else
2025 t = make_signed_type (precision);
2026
2027 if (precision <= 2 * MAX_BITS_PER_WORD)
2028 signed_and_unsigned_types[precision][unsignedp] = t;
2029
2030 if (TYPE_NAME (t) == 0)
2031 {
2032 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2033 TYPE_NAME (t) = get_identifier (type_name);
2034 }
2035
2036 return t;
2037 }
2038
2039 /* Likewise for floating-point types. */
2040
2041 static tree
2042 float_type_for_precision (int precision, enum machine_mode mode)
2043 {
2044 tree t;
2045 char type_name[20];
2046
2047 if (float_types[(int) mode] != 0)
2048 return float_types[(int) mode];
2049
2050 float_types[(int) mode] = t = make_node (REAL_TYPE);
2051 TYPE_PRECISION (t) = precision;
2052 layout_type (t);
2053
2054 if (TYPE_MODE (t) != mode)
2055 gigi_abort (414);
2056
2057 if (TYPE_NAME (t) == 0)
2058 {
2059 sprintf (type_name, "FLOAT_%d", precision);
2060 TYPE_NAME (t) = get_identifier (type_name);
2061 }
2062
2063 return t;
2064 }
2065
2066 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2067 an unsigned type; otherwise a signed type is returned. */
2068
2069 tree
2070 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2071 {
2072 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2073 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2074 else
2075 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2076 }
2077
2078 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2079
2080 tree
2081 gnat_unsigned_type (tree type_node)
2082 {
2083 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2084
2085 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2086 {
2087 type = copy_node (type);
2088 TREE_TYPE (type) = type_node;
2089 }
2090 else if (TREE_TYPE (type_node) != 0
2091 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2092 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2093 {
2094 type = copy_node (type);
2095 TREE_TYPE (type) = TREE_TYPE (type_node);
2096 }
2097
2098 return type;
2099 }
2100
2101 /* Return the signed version of a TYPE_NODE, a scalar type. */
2102
2103 tree
2104 gnat_signed_type (tree type_node)
2105 {
2106 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2107
2108 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2109 {
2110 type = copy_node (type);
2111 TREE_TYPE (type) = type_node;
2112 }
2113 else if (TREE_TYPE (type_node) != 0
2114 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2115 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2116 {
2117 type = copy_node (type);
2118 TREE_TYPE (type) = TREE_TYPE (type_node);
2119 }
2120
2121 return type;
2122 }
2123
2124 /* Return a type the same as TYPE except unsigned or signed according to
2125 UNSIGNEDP. */
2126
2127 tree
2128 gnat_signed_or_unsigned_type (int unsignedp, tree type)
2129 {
2130 if (! INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
2131 return type;
2132 else
2133 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2134 }
2135 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.  Dispatches
   recursively on the tree-code class of EXP; aborts on codes it cannot
   handle.  */

tree
max_size (tree exp, int max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants have no discriminant references.  */
      return exp;

    case 'x':
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (! CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Replace the discriminant reference by the extreme value of the
	 field's type.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* Comparisons yield 0 or 1; use the requested extreme.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation flips which extreme of the operand we want.  */
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  /* Falls through to the generic binary handling below.  */
	  {
	    /* Subtraction flips which extreme of the RHS we want.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Take the larger of the two arms; the condition itself is
	       irrelevant to the bound.  */
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }

  gigi_abort (408);
}
2232 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and left-justified-modular wrappers to get at the
     underlying array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.   Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     The template fields come in (min, max) pairs, so each iteration
     consumes two fields and advances either the bound list or the
     nested array type, whichever is driving the walk.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      /* FIELD is the min field; TREE_CHAIN (field) is the max field.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  /* The list was built back-to-front; restore constructor order.  */
  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2295 \f
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.  The numeric DTYPE and
   CLASS values stored below are VMS descriptor encodings.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* For Fortran-convention arrays (except with NCA descriptors), collect
     the index types in reverse dimension order.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      /* VAX floating-point types are selected by decimal digit count;
	 otherwise the integer DTYPE is chosen by bit size and
	 signedness.  */
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* The POINTER field's initializer takes the address of the (as yet
     unknown) object via a PLACEHOLDER_EXPR.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* A string descriptor with bounds: add the lower and upper bound
	 of the (one-dimensional) array, or zero for scalars.  */
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  Each coefficient is the
	 product of the lengths of all lower dimensions.
	 NOTE(review): field names are built with '0' + i, which assumes
	 ndim <= 9 -- confirm the dimension limit upstream.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2585
2586 /* Utility routine for above code to make a field. */
2587
2588 static tree
2589 make_descriptor_field (const char *name, tree type,
2590 tree rec_type, tree initial)
2591 {
2592 tree field
2593 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2594
2595 DECL_INITIAL (field) = initial;
2596 return field;
2597 }
2598 \f
2599 /* Build a type to be used to represent an aliased object whose nominal
2600 type is an unconstrained array. This consists of a RECORD_TYPE containing
2601 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2602 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2603 is used to represent an arbitrary unconstrained object. Use NAME
2604 as the name of the record. */
2605
2606 tree
2607 build_unc_object_type (tree template_type, tree object_type, tree name)
2608 {
2609 tree type = make_node (RECORD_TYPE);
2610 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2611 template_type, type, 0, 0, 0, 1);
2612 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2613 type, 0, 0, 0, 1);
2614
2615 TYPE_NAME (type) = name;
2616 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2617 finish_record_type (type,
2618 chainon (chainon (NULL_TREE, template_field),
2619 array_field),
2620 0, 0);
2621
2622 return type;
2623 }
2624 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (ptr == 0 && ref == 0)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well. Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it. After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed. We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
     qualifiers in thoses cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case: retarget every pointer and
     reference type in the chains at NEW_TYPE.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	{
	  TREE_TYPE (ptr) = new_type;

	  /* Re-announce named pointer types to the back end so debug
	     info reflects the new designated type.  */
	  if (TYPE_NAME (ptr) != 0
	      && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	      && TREE_CODE (new_type) != ENUMERAL_TYPE)
	    rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				      global_bindings_p (), 0);
	}

      for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
	{
	  TREE_TYPE (ref) = new_type;

	  if (TYPE_NAME (ref) != 0
	      && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	      && TREE_CODE (new_type) != ENUMERAL_TYPE)
	    rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				      global_bindings_p (), 0);
	}
    }

  /* Now deal with the unconstrained array case. In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context. */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Adopt the fat-pointer fields of the new type's pointer and
	 reparent them into the old fat-pointer record.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point every variant of the fat pointer back at the new
	 unconstrained array type.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record size is the sum of the two field sizes (bits and
	 bytes respectively).  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2768 \f
/* Convert a pointer to a constrained array into a pointer to a fat
   pointer.  This involves making or finding a template.

   TYPE is the fat pointer type to convert to; its first field is the
   pointer to the array data and its second field the pointer to the
   bounds template.  EXPR is the pointer value being converted.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* The template type is reached through the second field of the fat
     pointer type, which is a pointer to it.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record
     it points to: the record's first field is the template, its second
     field the array data.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is evaluated twice below (template and data), so protect it
	 against multiple evaluation.  */
      expr = save_expr (expr);

      /* If we already have the address of the record, just peel off the
	 ADDR_EXPR instead of building an INDIRECT_REF around it.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      template = build_component_ref (expr, NULL_TREE, fields, 0);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), 0));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2834 \f
2835 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2836 is something that is a fat pointer, so convert to it first if it EXPR
2837 is not already a fat pointer. */
2838
2839 static tree
2840 convert_to_thin_pointer (tree type, tree expr)
2841 {
2842 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2843 expr
2844 = convert_to_fat_pointer
2845 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2846
2847 /* We get the pointer to the data and use a NOP_EXPR to make it the
2848 proper GCC type. */
2849 expr
2850 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2851 expr = build1 (NOP_EXPR, type, expr);
2852
2853 return expr;
2854 }
2855 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;
  /* If we're converting between two aggregate types that have the same main
     variant, just make a NOP_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (NOP_EXPR, type, expr);

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  (The empty
     statement below deliberately falls through to the code at the end
     of the padding handling.)  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (! TREE_CONSTANT (TYPE_SIZE (type))
	       || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						   TYPE_FIELDS (etype), 0));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, 0);

      else
	return
	  gnat_build_constructor (type,
			     tree_cons (TYPE_FIELDS (type),
					convert (TREE_TYPE
						 (TYPE_FIELDS (type)),
						 expr),
					NULL_TREE));
    }

  /* If the input is a biased type, adjust first by adding the bias
     (the type's minimum value) to recover the actual value.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (GNAT_NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), 0));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  Likewise for a
	 CONSTRUCTOR.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoids unneeded conversions, which makes reference computations
	 more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
	  && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
	  && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
	return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
		      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  After the dereference, reclassify the
	 expression under its new (array) type and fall through.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, 0));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Strip a view conversion between non-fat-pointer aggregates and
	 retry on the underlying operand.  */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If both types share a main variant, or are integer types sharing a
     base type, a simple NOP_EXPR suffices.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
	  && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  /* Dispatch on the target type for the general cases.  */
  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, 0);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Converting to a biased type: subtract the bias (the type's
	   minimum value) to get the stored representation.  */
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, 0);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, 0);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3192 \f
3193 /* Remove all conversions that are done in EXP. This includes converting
3194 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3195 is nonzero, always return the address of the containing object even if
3196 the address is not bit-aligned. */
3197
3198 tree
3199 remove_conversions (tree exp, int true_address)
3200 {
3201 switch (TREE_CODE (exp))
3202 {
3203 case CONSTRUCTOR:
3204 if (true_address
3205 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3206 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3207 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3208 break;
3209
3210 case COMPONENT_REF:
3211 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3212 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3213 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3214 break;
3215
3216 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3217 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3218 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3219
3220 default:
3221 break;
3222 }
3223
3224 return exp;
3225 }
3226 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the pointer to the array data found in the fat
	     pointer's P_ARRAY field.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, 0));
	  /* Propagate read-only-ness of the reference to the new node.  */
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR with the type of the underlying array,
	   found through the fat pointer type's first field.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): there is no break here, so an exp of
	 UNCONSTRAINED_ARRAY_TYPE that is neither of the above falls
	 through into the RECORD_TYPE handling below — confirm this
	 fallthrough is intentional.  */

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	/* The array is the second field of a template-bearing record.  */
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  /* Nothing to strip: return EXP unchanged.  */
  return exp;
}
3282 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is set, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, int notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* For a biased source, first view it as its unbiased equivalent so
	 the normal conversion machinery does not re-adjust the value.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      /* Likewise, convert to an unbiased copy of a biased target, then
	 reinterpret the result as the biased type below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      /* General case: strip any template wrapping, then make a raw
	 bit-for-bit view conversion.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (! notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Extend via a left-shift/right-shift pair in the full-width base
	 type; the right shift is arithmetic or logical according to the
	 signedness of TYPE, giving a sign- or zero-extension.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3440
3441 #include "gt-ada-utils.h"
3442 #include "gtype-ada.h"