C90 prototype updates.
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2003, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39
40 #include "ada.h"
41 #include "types.h"
42 #include "atree.h"
43 #include "elists.h"
44 #include "namet.h"
45 #include "nlists.h"
46 #include "stringt.h"
47 #include "uintp.h"
48 #include "fe.h"
49 #include "sinfo.h"
50 #include "einfo.h"
51 #include "ada-tree.h"
52 #include "gigi.h"
53
/* Fall-back definitions for targets that do not provide these macros.  */

#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  Indexed by Entity_Id - First_Node_Id;
   allocated (zeroed) in init_gnat_to_gnu.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

struct e_stack GTY(()) {
  /* The stack entry below this one.  */
  struct e_stack *next;
  /* The saved pending-elaborations list for the outer context.  */
  tree elab_list;
};
static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

	the global one
	one for each subprogram definition
	one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level GTY(())
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;
  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;
  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;
  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};

/* The binding level currently in effect.  */
static GTY(()) struct binding_level *current_binding_level;

/* A chain of binding_level structures awaiting reuse.  */
static GTY((deletable (""))) struct binding_level *free_binding_level;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};

/* Per-function language-specific state; currently unused by gigi.  */
struct language_function GTY(())
{
  int unused;
};

/* Forward declarations for the local helpers defined later in this file.  */
static tree merge_sizes (tree, tree, tree, int, int);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static int value_zerop (tree);
static tree float_type_for_size (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *,tree, tree, tree);
static int value_factor_p (tree, int);
static int potential_alignment_gap (tree, tree, tree);
157 \f
158 /* Initialize the association of GNAT nodes to GCC trees. */
159
160 void
161 init_gnat_to_gnu (void)
162 {
163 associate_gnat_to_gnu
164 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
165
166 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
167 }
168
169 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
170 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
171 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
172
173 If GNU_DECL is zero, a previous association is to be reset. */
174
175 void
176 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
177 {
178 /* Check that GNAT_ENTITY is not already defined and that it is being set
179 to something which is a decl. Raise gigi 401 if not. Usually, this
180 means GNAT_ENTITY is defined twice, but occasionally is due to some
181 Gigi problem. */
182 if (gnu_decl
183 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
184 || (! no_check && ! DECL_P (gnu_decl))))
185 gigi_abort (401);
186
187 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
188 }
189
190 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
191 Return the ..._DECL node that was associated with it. If there is no tree
192 node associated with GNAT_ENTITY, abort.
193
194 In some cases, such as delayed elaboration or expressions that need to
195 be elaborated only once, GNAT_ENTITY is really not an entity. */
196
197 tree
198 get_gnu_tree (Entity_Id gnat_entity)
199 {
200 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
201 gigi_abort (402);
202
203 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
204 }
205
206 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
207
208 int
209 present_gnu_tree (Entity_Id gnat_entity)
210 {
211 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
212 }
213
214 \f
215 /* Return non-zero if we are currently in the global binding level. */
216
217 int
218 global_bindings_p (void)
219 {
220 return (force_global != 0 || current_binding_level == global_binding_level
221 ? -1 : 0);
222 }
223
224 /* Return the list of declarations in the current level. Note that this list
225 is in reverse order (it has to be so for back-end compatibility). */
226
227 tree
228 getdecls (void)
229 {
230 return current_binding_level->names;
231 }
232
233 /* Nonzero if the current level needs to have a BLOCK made. */
234
235 int
236 kept_level_p (void)
237 {
238 return (current_binding_level->names != 0);
239 }
240
241 /* Enter a new binding level. The input parameter is ignored, but has to be
242 specified for back-end compatibility. */
243
244 void
245 pushlevel (int ignore ATTRIBUTE_UNUSED)
246 {
247 struct binding_level *newlevel = NULL;
248
249 /* Reuse a struct for this binding level, if there is one. */
250 if (free_binding_level)
251 {
252 newlevel = free_binding_level;
253 free_binding_level = free_binding_level->level_chain;
254 }
255 else
256 newlevel
257 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
258
259 *newlevel = clear_binding_level;
260
261 /* Add this level to the front of the chain (stack) of levels that are
262 active. */
263 newlevel->level_chain = current_binding_level;
264 current_binding_level = newlevel;
265 }
266
267 /* Exit a binding level.
268 Pop the level off, and restore the state of the identifier-decl mappings
269 that were in effect when this level was entered.
270
271 If KEEP is nonzero, this level had explicit declarations, so
272 and create a "block" (a BLOCK node) for the level
273 to record its declarations and subblocks for symbol table output.
274
275 If FUNCTIONBODY is nonzero, this level is the body of a function,
276 so create a block as if KEEP were set and also clear out all
277 label names.
278
279 If REVERSE is nonzero, reverse the order of decls before putting
280 them into the BLOCK. */
281
tree
poplevel (int keep, int reverse, int functionbody)
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed
     for the binding level that we are about to exit and which is returned
     by this routine.  May be NULL_TREE if no BLOCK is needed.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  /* Nonzero if set_block preallocated this level's BLOCK.  */
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL node, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed (addressable, not yet
     emitted, and with a body).  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
	&& ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
	&& DECL_INITIAL (decl_node) != 0)
      {
	/* Save/restore the current function context around the emission.  */
	push_function_context ();
	output_inline_function (decl_node);
	pop_function_context ();
      }

  /* Use the preallocated BLOCK if there is one; otherwise make one only
     if the caller asked to keep declarations or this is a function body.  */
  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock of its enclosing
     scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
	 don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
	{
	  if (TREE_USED (subblock_node))
	    TREE_USED (DECL_NAME (subblock_node)) = 1;
	  if (TREE_ADDRESSABLE (subblock_node))
	    TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
	}

  {
    /* Pop the current level, and return the structure to the free list
       for reuse by a later pushlevel.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
	 in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
	 leave them in the BLOCK because they are found in the FUNCTION_DECL
	 instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      /* Chain the new BLOCK into the enclosing level unless set_block
	 already placed it there.  */
      if (!block_previously_created)
	current_binding_level->blocks
	  = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
383 \f
384 /* Insert BLOCK at the end of the list of subblocks of the
385 current binding level. This is used when a BIND_EXPR is expanded,
386 to handle the BLOCK node inside the BIND_EXPR. */
387
388 void
389 insert_block (tree block)
390 {
391 TREE_USED (block) = 1;
392 current_binding_level->blocks
393 = chainon (current_binding_level->blocks, block);
394 }
395
396 /* Set the BLOCK node for the innermost scope
397 (the one we are currently in). */
398
399 void
400 set_block (tree block)
401 {
402 current_binding_level->this_block = block;
403 current_binding_level->names = chainon (current_binding_level->names,
404 BLOCK_VARS (block));
405 current_binding_level->blocks = chainon (current_binding_level->blocks,
406 BLOCK_SUBBLOCKS (block));
407 }
408
409 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
410 Returns the ..._DECL node. */
411
412 tree
413 pushdecl (tree decl)
414 {
415 struct binding_level *b;
416
417 /* If at top level, there is no context. But PARM_DECLs always go in the
418 level of its function. */
419 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
420 {
421 b = global_binding_level;
422 DECL_CONTEXT (decl) = 0;
423 }
424 else
425 {
426 b = current_binding_level;
427 DECL_CONTEXT (decl) = current_function_decl;
428 }
429
430 /* Put the declaration on the list. The list of declarations is in reverse
431 order. The list will be reversed later if necessary. This needs to be
432 this way for compatibility with the back-end.
433
434 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
435 will cause trouble with the debugger and aren't needed anyway. */
436 if (TREE_CODE (decl) != TYPE_DECL
437 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
438 {
439 TREE_CHAIN (decl) = b->names;
440 b->names = decl;
441 }
442
443 /* For the declaration of a type, set its name if it either is not already
444 set, was set to an IDENTIFIER_NODE, indicating an internal name,
445 or if the previous type name was not derived from a source name.
446 We'd rather have the type named with a real name and all the pointer
447 types to the same object have the same POINTER_TYPE node. Code in this
448 function in c-decl.c makes a copy of the type node here, but that may
449 cause us trouble with incomplete types, so let's not try it (at least
450 for now). */
451
452 if (TREE_CODE (decl) == TYPE_DECL
453 && DECL_NAME (decl) != 0
454 && (TYPE_NAME (TREE_TYPE (decl)) == 0
455 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
456 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
457 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
458 && ! DECL_ARTIFICIAL (decl))))
459 TYPE_NAME (TREE_TYPE (decl)) = decl;
460
461 return decl;
462 }
463 \f
464 /* Do little here. Set up the standard declarations later after the
465 front end has been run. */
466
/* Minimal declaration-processing setup: create the global binding level and
   the basic tree nodes.  The standard GNAT declarations are set up later,
   after the front end has been run (see init_gigi_decls).  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  /* NOTE(review): the decl is named "unsigned char" but is bound to
     char_type_node — apparently intentional for debug naming; confirm.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}
501
502 /* Create the predefined scalar types such as `integer_type_node' needed
503 in the gcc back-end and initialize the global binding level. */
504
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and declare the GNAT runtime entry points
   (allocation, exception handling, setjmp).  LONG_LONG_FLOAT_TYPE and
   EXCEPTION_TYPE are trees supplied by the front end.  Must run after
   gnat_init_decl_processing.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
			    longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
			unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
			    void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  The
     jmpbuf is modeled as an array of six Pmode-sized words.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
     NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
     NULL_TREE,
     build_function_type (void_type_node,
			  tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
     NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_gnat_exception"),
     NULL_TREE,
     build_function_type (build_pointer_type (except_type_node), NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl,
     since this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	char name[17];

	/* NOTE(review): i is unsigned but %.2d expects int; harmless for
	   the small reason-code range, but %.2u would be exact.  */
	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");
}
688 \f
689 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
690 nodes (FIELDLIST), finish constructing the record or union type.
691 If HAS_REP is nonzero, this record has a rep clause; don't call
692 layout_type but merely set the size and alignment ourselves.
693 If DEFER_DEBUG is nonzero, do not call the debugging routines
694 on this type; it will be done later. */
695
696 void
697 finish_record_type (tree record_type,
698 tree fieldlist,
699 int has_rep,
700 int defer_debug)
701 {
702 enum tree_code code = TREE_CODE (record_type);
703 tree ada_size = bitsize_zero_node;
704 tree size = bitsize_zero_node;
705 tree size_unit = size_zero_node;
706 int var_size = 0;
707 tree field;
708
709 TYPE_FIELDS (record_type) = fieldlist;
710
711 if (TYPE_NAME (record_type) != 0
712 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
713 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
714 else
715 TYPE_STUB_DECL (record_type)
716 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
717 record_type));
718
719 /* We don't need both the typedef name and the record name output in
720 the debugging information, since they are the same. */
721 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
722
723 /* Globally initialize the record first. If this is a rep'ed record,
724 that just means some initializations; otherwise, layout the record. */
725
726 if (has_rep)
727 {
728 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
729 TYPE_MODE (record_type) = BLKmode;
730 if (TYPE_SIZE (record_type) == 0)
731 {
732 TYPE_SIZE (record_type) = bitsize_zero_node;
733 TYPE_SIZE_UNIT (record_type) = size_zero_node;
734 }
735 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
736 out just like a UNION_TYPE, since the size will be fixed. */
737 else if (code == QUAL_UNION_TYPE)
738 code = UNION_TYPE;
739 }
740 else
741 {
742 /* Ensure there isn't a size already set. There can be in an error
743 case where there is a rep clause but all fields have errors and
744 no longer have a position. */
745 TYPE_SIZE (record_type) = 0;
746 layout_type (record_type);
747 }
748
749 /* At this point, the position and size of each field is known. It was
750 either set before entry by a rep clause, or by laying out the type
751 above. We now make a pass through the fields (in reverse order for
752 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
753 (for rep'ed records that are not padding types); and the mode (for
754 rep'ed records). */
755
756 if (code == QUAL_UNION_TYPE)
757 fieldlist = nreverse (fieldlist);
758
759 for (field = fieldlist; field; field = TREE_CHAIN (field))
760 {
761 tree type = TREE_TYPE (field);
762 tree this_size = DECL_SIZE (field);
763 tree this_size_unit = DECL_SIZE_UNIT (field);
764 tree this_ada_size = DECL_SIZE (field);
765
766 /* We need to make an XVE/XVU record if any field has variable size,
767 whether or not the record does. For example, if we have an union,
768 it may be that all fields, rounded up to the alignment, have the
769 same size, in which case we'll use that size. But the debug
770 output routines (except Dwarf2) won't be able to output the fields,
771 so we need to make the special record. */
772 if (TREE_CODE (this_size) != INTEGER_CST)
773 var_size = 1;
774
775 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
776 || TREE_CODE (type) == QUAL_UNION_TYPE)
777 && ! TYPE_IS_FAT_POINTER_P (type)
778 && ! TYPE_CONTAINS_TEMPLATE_P (type)
779 && TYPE_ADA_SIZE (type) != 0)
780 this_ada_size = TYPE_ADA_SIZE (type);
781
782 if (has_rep && ! DECL_BIT_FIELD (field))
783 TYPE_ALIGN (record_type)
784 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
785
786 switch (code)
787 {
788 case UNION_TYPE:
789 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
790 size = size_binop (MAX_EXPR, size, this_size);
791 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
792 break;
793
794 case QUAL_UNION_TYPE:
795 ada_size
796 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
797 this_ada_size, ada_size));
798 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
799 this_size, size));
800 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
801 this_size_unit, size_unit));
802 break;
803
804 case RECORD_TYPE:
805 /* Since we know here that all fields are sorted in order of
806 increasing bit position, the size of the record is one
807 higher than the ending bit of the last field processed
808 unless we have a rep clause, since in that case we might
809 have a field outside a QUAL_UNION_TYPE that has a higher ending
810 position. So use a MAX in that case. Also, if this field is a
811 QUAL_UNION_TYPE, we need to take into account the previous size in
812 the case of empty variants. */
813 ada_size
814 = merge_sizes (ada_size, bit_position (field), this_ada_size,
815 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
816 size = merge_sizes (size, bit_position (field), this_size,
817 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
818 size_unit
819 = merge_sizes (size_unit, byte_position (field), this_size_unit,
820 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
821 break;
822
823 default:
824 abort ();
825 }
826 }
827
828 if (code == QUAL_UNION_TYPE)
829 nreverse (fieldlist);
830
831 /* If this is a padding record, we never want to make the size smaller than
832 what was specified in it, if any. */
833 if (TREE_CODE (record_type) == RECORD_TYPE
834 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
835 {
836 size = TYPE_SIZE (record_type);
837 size_unit = TYPE_SIZE_UNIT (record_type);
838 }
839
840 /* Now set any of the values we've just computed that apply. */
841 if (! TYPE_IS_FAT_POINTER_P (record_type)
842 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
843 SET_TYPE_ADA_SIZE (record_type, ada_size);
844
845 if (has_rep)
846 {
847 if (! (TREE_CODE (record_type) == RECORD_TYPE
848 && TYPE_IS_PADDING_P (record_type)
849 && CONTAINS_PLACEHOLDER_P (size)))
850 {
851 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
852 TYPE_SIZE_UNIT (record_type)
853 = round_up (size_unit,
854 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
855 }
856
857 compute_record_mode (record_type);
858 }
859
860 if (! defer_debug)
861 {
862 /* If this record is of variable size, rename it so that the
863 debugger knows it is and make a new, parallel, record
864 that tells the debugger how the record is laid out. See
865 exp_dbug.ads. But don't do this for records that are padding
866 since they confuse GDB. */
867 if (var_size
868 && ! (TREE_CODE (record_type) == RECORD_TYPE
869 && TYPE_IS_PADDING_P (record_type)))
870 {
871 tree new_record_type
872 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
873 ? UNION_TYPE : TREE_CODE (record_type));
874 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
875 tree new_id
876 = concat_id_with_name (orig_id,
877 TREE_CODE (record_type) == QUAL_UNION_TYPE
878 ? "XVU" : "XVE");
879 tree last_pos = bitsize_zero_node;
880 tree old_field;
881 tree prev_old_field = 0;
882
883 TYPE_NAME (new_record_type) = new_id;
884 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
885 TYPE_STUB_DECL (new_record_type)
886 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
887 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
888 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
889 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
890 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
891
892 /* Now scan all the fields, replacing each field with a new
893 field corresponding to the new encoding. */
894 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
895 old_field = TREE_CHAIN (old_field))
896 {
897 tree field_type = TREE_TYPE (old_field);
898 tree field_name = DECL_NAME (old_field);
899 tree new_field;
900 tree curpos = bit_position (old_field);
901 int var = 0;
902 unsigned int align = 0;
903 tree pos;
904
905 /* See how the position was modified from the last position.
906
907 There are two basic cases we support: a value was added
908 to the last position or the last position was rounded to
909 a boundary and they something was added. Check for the
910 first case first. If not, see if there is any evidence
911 of rounding. If so, round the last position and try
912 again.
913
914 If this is a union, the position can be taken as zero. */
915
916 if (TREE_CODE (new_record_type) == UNION_TYPE)
917 pos = bitsize_zero_node, align = 0;
918 else
919 pos = compute_related_constant (curpos, last_pos);
920
921 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
922 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
923 {
924 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
925 pos = compute_related_constant (curpos,
926 round_up (last_pos, align));
927 }
928 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
929 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
930 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
931 && host_integerp (TREE_OPERAND
932 (TREE_OPERAND (curpos, 0), 1),
933 1))
934 {
935 align
936 = tree_low_cst
937 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
938 pos = compute_related_constant (curpos,
939 round_up (last_pos, align));
940 }
941 else if (potential_alignment_gap (prev_old_field, old_field,
942 pos))
943 {
944 align = TYPE_ALIGN (field_type);
945 pos = compute_related_constant (curpos,
946 round_up (last_pos, align));
947 }
948
949 /* If we can't compute a position, set it to zero.
950
951 ??? We really should abort here, but it's too much work
952 to get this correct for all cases. */
953
954 if (pos == 0)
955 pos = bitsize_zero_node;
956
957 /* See if this type is variable-size and make a new type
958 and indicate the indirection if so. */
959 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
960 {
961 field_type = build_pointer_type (field_type);
962 var = 1;
963 }
964
965 /* Make a new field name, if necessary. */
966 if (var || align != 0)
967 {
968 char suffix[6];
969
970 if (align != 0)
971 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
972 align / BITS_PER_UNIT);
973 else
974 strcpy (suffix, "XVL");
975
976 field_name = concat_id_with_name (field_name, suffix);
977 }
978
979 new_field = create_field_decl (field_name, field_type,
980 new_record_type, 0,
981 DECL_SIZE (old_field), pos, 0);
982 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
983 TYPE_FIELDS (new_record_type) = new_field;
984
985 /* If old_field is a QUAL_UNION_TYPE, take its size as being
986 zero. The only time it's not the last field of the record
987 is when there are other components at fixed positions after
988 it (meaning there was a rep clause for every field) and we
989 want to be able to encode them. */
990 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
991 (TREE_CODE (TREE_TYPE (old_field))
992 == QUAL_UNION_TYPE)
993 ? bitsize_zero_node
994 : DECL_SIZE (old_field));
995 prev_old_field = old_field;
996 }
997
998 TYPE_FIELDS (new_record_type)
999 = nreverse (TYPE_FIELDS (new_record_type));
1000
1001 rest_of_type_compilation (new_record_type, global_bindings_p ());
1002 }
1003
1004 rest_of_type_compilation (record_type, global_bindings_p ());
1005 }
1006 }
1007
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size,
             tree first_bit,
             tree size,
             int special,
             int has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  /* Ordinary case: the new size is the end position of this field
     (FIRST_BIT + SIZE), maximized with the previous size when a record
     representation clause is in effect (fields may then be laid out in
     any order, so the last field need not end last).  */
  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  /* QUAL_UNION_TYPE case: SIZE is a COND_EXPR selecting the size of the
     active variant.  Recurse on each arm, replacing a zero size (a variant
     that is not present) with the previous size.  */
  else
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1053
1054 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1055 related by the addition of a constant. Return that constant if so. */
1056
1057 static tree
1058 compute_related_constant (tree op0, tree op1)
1059 {
1060 tree op0_var, op1_var;
1061 tree op0_con = split_plus (op0, &op0_var);
1062 tree op1_con = split_plus (op1, &op1_var);
1063 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1064
1065 if (operand_equal_p (op0_var, op1_var, 0))
1066 return result;
1067 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1068 return result;
1069 else
1070 return 0;
1071 }
1072
1073 /* Utility function of above to split a tree OP which may be a sum, into a
1074 constant part, which is returned, and a variable part, which is stored
1075 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1076 bitsizetype. */
1077
1078 static tree
1079 split_plus (tree in, tree *pvar)
1080 {
1081 /* Strip NOPS in order to ease the tree traversal and maximize the
1082 potential for constant or plus/minus discovery. We need to be careful
1083 to always return and set *pvar to bitsizetype trees, but it's worth
1084 the effort. */
1085 STRIP_NOPS (in);
1086
1087 *pvar = convert (bitsizetype, in);
1088
1089 if (TREE_CODE (in) == INTEGER_CST)
1090 {
1091 *pvar = bitsize_zero_node;
1092 return convert (bitsizetype, in);
1093 }
1094 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1095 {
1096 tree lhs_var, rhs_var;
1097 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1098 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1099
1100 if (lhs_var == TREE_OPERAND (in, 0)
1101 && rhs_var == TREE_OPERAND (in, 1))
1102 return bitsize_zero_node;
1103
1104 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1105 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1106 }
1107 else
1108 return bitsize_zero_node;
1109 }
1110 \f
1111 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1112 subprogram. If it is void_type_node, then we are dealing with a procedure,
1113 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1114 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1115 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1116 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1117 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1118 RETURNS_WITH_DSP is nonzero if the function is to return with a
1119 depressed stack pointer. */
1120
1121 tree
1122 create_subprog_type (tree return_type,
1123 tree param_decl_list,
1124 tree cico_list,
1125 int returns_unconstrained,
1126 int returns_by_ref,
1127 int returns_with_dsp)
1128 {
1129 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1130 the subprogram formal parameters. This list is generated by traversing the
1131 input list of PARM_DECL nodes. */
1132 tree param_type_list = NULL;
1133 tree param_decl;
1134 tree type;
1135
1136 for (param_decl = param_decl_list; param_decl;
1137 param_decl = TREE_CHAIN (param_decl))
1138 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1139 param_type_list);
1140
1141 /* The list of the function parameter types has to be terminated by the void
1142 type to signal to the back-end that we are not dealing with a variable
1143 parameter subprogram, but that the subprogram has a fixed number of
1144 parameters. */
1145 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1146
1147 /* The list of argument types has been created in reverse
1148 so nreverse it. */
1149 param_type_list = nreverse (param_type_list);
1150
1151 type = build_function_type (return_type, param_type_list);
1152
1153 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1154 or the new type should, make a copy of TYPE. Likewise for
1155 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1156 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1157 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1158 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1159 type = copy_type (type);
1160
1161 SET_TYPE_CI_CO_LIST (type, cico_list);
1162 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1163 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1164 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1165 return type;
1166 }
1167 \f
1168 /* Return a copy of TYPE but safe to modify in any way. */
1169
1170 tree
1171 copy_type (tree type)
1172 {
1173 tree new = copy_node (type);
1174
1175 /* copy_node clears this field instead of copying it, because it is
1176 aliased with TREE_CHAIN. */
1177 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1178
1179 TYPE_POINTER_TO (new) = 0;
1180 TYPE_REFERENCE_TO (new) = 0;
1181 TYPE_MAIN_VARIANT (new) = new;
1182 TYPE_NEXT_VARIANT (new) = 0;
1183
1184 return new;
1185 }
1186 \f
1187 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1188 TYPE_INDEX_TYPE is INDEX. */
1189
1190 tree
1191 create_index_type (tree min, tree max, tree index)
1192 {
1193 /* First build a type for the desired range. */
1194 tree type = build_index_2_type (min, max);
1195
1196 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1197 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1198 is set, but not to INDEX, make a copy of this type with the requested
1199 index type. Note that we have no way of sharing these types, but that's
1200 only a small hole. */
1201 if (TYPE_INDEX_TYPE (type) == index)
1202 return type;
1203 else if (TYPE_INDEX_TYPE (type) != 0)
1204 type = copy_type (type);
1205
1206 SET_TYPE_INDEX_TYPE (type, index);
1207 return type;
1208 }
1209 \f
1210 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1211 string) and TYPE is a ..._TYPE node giving its data type.
1212 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1213 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1214 information about this type. */
1215
1216 tree
1217 create_type_decl (tree type_name,
1218 tree type,
1219 struct attrib *attr_list,
1220 int artificial_p,
1221 int debug_info_p)
1222 {
1223 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1224 enum tree_code code = TREE_CODE (type);
1225
1226 DECL_ARTIFICIAL (type_decl) = artificial_p;
1227 pushdecl (type_decl);
1228 process_attributes (type_decl, attr_list);
1229
1230 /* Pass type declaration information to the debugger unless this is an
1231 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1232 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1233 a dummy type, which will be completed later, or a type for which
1234 debugging information was not requested. */
1235 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1236 || ! debug_info_p)
1237 DECL_IGNORED_P (type_decl) = 1;
1238 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1239 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1240 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1241 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1242
1243 return type_decl;
1244 }
1245
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.  */

tree
create_var_decl (tree var_name,
		 tree asm_name,
		 tree type,
		 tree var_init,
		 int const_flag,
		 int public_flag,
		 int extern_flag,
		 int static_flag,
		 struct attrib *attr_list)
{
  /* Whether VAR_INIT counts as a constant initializer in the context where
     the variable lives: at file scope or for a static it must be a valid
     static initializer, otherwise TREE_CONSTANT suffices.  A type mismatch
     between the initializer and the variable disqualifies it outright.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small" */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* If set below, the initialization is emitted as an explicit assignment
     statement after the decl is expanded, instead of via DECL_INITIAL.  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* A non-constant initializer for a global must be deferred to the unit's
     elaboration routine.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  /* A type-mismatched initializer, or a non-constant one for a static,
     is emitted as an assignment statement below.  */
  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top
     level we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1367 \f
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name,
		   tree field_type,
		   tree record_type,
		   int packed,
		   tree size,
		   tree pos,
		   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed)
    {
      /* PACKED == 1 means pack to the RM (representation) size when it
	 differs from the natural size.  */
      if (packed == 1 && ! operand_equal_p (rm_size (field_type),
					    TYPE_SIZE (field_type), 0))
	size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (size != 0 && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* An unpacked bitfield with no rep clause keeps the natural
	 alignment, capped by the record's alignment if one is set.  */
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      /* POS & -POS isolates the lowest set bit of the constant position.  */
      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      /* Split POS into the byte offset and the within-byte bit offset
	 expected by the middle end.  */
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);

  return field_decl;
}
1488
1489 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1490 effects, has the value of zero. */
1491
1492 static int
1493 value_zerop (tree exp)
1494 {
1495 if (TREE_CODE (exp) == COMPOUND_EXPR)
1496 return value_zerop (TREE_OPERAND (exp, 1));
1497
1498 return integer_zerop (exp);
1499 }
1500 \f
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

tree
create_param_decl (tree param_name, tree param_type, int readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor the PROMOTE_PROTOTYPES target macro, as not doing so can
     lead to various ABI violations.  */
#ifdef PROMOTE_PROTOTYPES
  /* Promote narrow integral and enumeration parameters to int, as C
     argument promotion would.  */
  if ((TREE_CODE (param_type) == INTEGER_TYPE
       || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  /* Copy the range type so the biased flag can be set without
	     disturbing any shared node.  */
	  param_type
	    = copy_type (build_range_type (integer_type_node,
					   TYPE_MIN_VALUE (param_type),
					   TYPE_MAX_VALUE (param_type)));

	  TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
	}
      else
	param_type = integer_type_node;
    }
#endif

  /* Both the passing type and the as-written type are set to the (possibly
     promoted) PARAM_TYPE.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
1540 \f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand the attribute to the generic machinery; TYPE_IN_PLACE lets
	   it modify the decl's type directly.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias needs a definition, hence TREE_STATIC.  */
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
#ifdef ASM_OUTPUT_SECTION_NAME
	/* Place the decl in the named section; a decl with an explicit
	   section cannot be common.  */
	DECL_SECTION_NAME (decl)
	  = build_string (IDENTIFIER_LENGTH (attr_list->name),
			  IDENTIFIER_POINTER (attr_list->name));
	DECL_COMMON (decl) = 0;
#else
	post_error ("?section attributes are not supported for this target",
		    attr_list->error_point);
#endif
	break;
      }
}
1581 \f
1582 /* Add some pending elaborations on the list. */
1583
1584 void
1585 add_pending_elaborations (tree var_decl, tree var_init)
1586 {
1587 if (var_init != 0)
1588 Check_Elaboration_Code_Allowed (error_gnat_node);
1589
1590 pending_elaborations
1591 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1592 }
1593
1594 /* Obtain any pending elaborations and clear the old list. */
1595
1596 tree
1597 get_pending_elaborations (void)
1598 {
1599 /* Each thing added to the list went on the end; we want it on the
1600 beginning. */
1601 tree result = TREE_CHAIN (pending_elaborations);
1602
1603 TREE_CHAIN (pending_elaborations) = 0;
1604 return result;
1605 }
1606
1607 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1608 of 2. */
1609
1610 static int
1611 value_factor_p (tree value, int factor)
1612 {
1613 if (host_integerp (value, 1))
1614 return tree_low_cst (value, 1) % factor == 0;
1615
1616 if (TREE_CODE (value) == MULT_EXPR)
1617 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1618 || value_factor_p (TREE_OPERAND (value, 1), factor));
1619
1620 return 0;
1621 }
1622
1623 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1624 unless we can prove these 2 fields are laid out in such a way that no gap
1625 exist between the end of PREV_FIELD and the begining of CURR_FIELD. OFFSET
1626 is the distance in bits between the end of PREV_FIELD and the starting
1627 position of CURR_FIELD. It is ignored if null. */
1628
1629 static int
1630 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1631 {
1632 /* If this is the first field of the record, there cannot be any gap */
1633 if (!prev_field)
1634 return 0;
1635
1636 /* If the previous field is a union type, then return False: The only
1637 time when such a field is not the last field of the record is when
1638 there are other components at fixed positions after it (meaning there
1639 was a rep clause for every field), in which case we don't want the
1640 alignment constraint to override them. */
1641 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1642 return 0;
1643
1644 /* If the distance between the end of prev_field and the begining of
1645 curr_field is constant, then there is a gap if the value of this
1646 constant is not null. */
1647 if (offset && host_integerp (offset, 1))
1648 return (!integer_zerop (offset));
1649
1650 /* If the size and position of the previous field are constant,
1651 then check the sum of this size and position. There will be a gap
1652 iff it is not multiple of the current field alignment. */
1653 if (host_integerp (DECL_SIZE (prev_field), 1)
1654 && host_integerp (bit_position (prev_field), 1))
1655 return ((tree_low_cst (bit_position (prev_field), 1)
1656 + tree_low_cst (DECL_SIZE (prev_field), 1))
1657 % DECL_ALIGN (curr_field) != 0);
1658
1659 /* If both the position and size of the previous field are multiples
1660 of the current field alignment, there can not be any gap. */
1661 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1662 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1663 return 0;
1664
1665 /* Fallback, return that there may be a potential gap */
1666 return 1;
1667 }
1668
1669 /* Return nonzero if there are pending elaborations. */
1670
1671 int
1672 pending_elaborations_p (void)
1673 {
1674 return TREE_CHAIN (pending_elaborations) != 0;
1675 }
1676
1677 /* Save a copy of the current pending elaboration list and make a new
1678 one. */
1679
1680 void
1681 push_pending_elaborations (void)
1682 {
1683 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1684
1685 p->next = elist_stack;
1686 p->elab_list = pending_elaborations;
1687 elist_stack = p;
1688 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1689 }
1690
1691 /* Pop the stack of pending elaborations. */
1692
1693 void
1694 pop_pending_elaborations (void)
1695 {
1696 struct e_stack *p = elist_stack;
1697
1698 pending_elaborations = p->elab_list;
1699 elist_stack = p->next;
1700 }
1701
1702 /* Return the current position in pending_elaborations so we can insert
1703 elaborations after that point. */
1704
1705 tree
1706 get_elaboration_location (void)
1707 {
1708 return tree_last (pending_elaborations);
1709 }
1710
1711 /* Insert the current elaborations after ELAB, which is in some elaboration
1712 list. */
1713
1714 void
1715 insert_elaboration_list (tree elab)
1716 {
1717 tree next = TREE_CHAIN (elab);
1718
1719 if (TREE_CHAIN (pending_elaborations))
1720 {
1721 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1722 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1723 TREE_CHAIN (pending_elaborations) = 0;
1724 }
1725 }
1726
1727 /* Returns a LABEL_DECL node for LABEL_NAME. */
1728
1729 tree
1730 create_label_decl (tree label_name)
1731 {
1732 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1733
1734 DECL_CONTEXT (label_decl) = current_function_decl;
1735 DECL_MODE (label_decl) = VOIDmode;
1736 DECL_SOURCE_LOCATION (label_decl) = input_location;
1737
1738 return label_decl;
1739 }
1740 \f
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  */

tree
create_subprog_decl (tree subprog_name,
		     tree asm_name,
		     tree subprog_type,
		     tree param_decl_list,
		     int inline_flag,
		     int public_flag,
		     int extern_flag,
		     struct attrib *attr_list)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = 1;

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  DECL_INLINE (subprog_decl) = inline_flag;
  /* Propagate const/volatile from the function type to the decl.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  /* The RESULT_DECL is anonymous; only its type matters here.  */
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  subprog_decl = pushdecl (subprog_decl);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
}
1791 \f
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  Incremented/decremented by the subprogram
   body begin/end routines below.  */

static int function_nesting_depth;
1797
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  It enters a binding level for the body,
   pushes the PARM_DECLs into it, and starts RTL generation for the
   function.  The matching teardown is end_subprog_body, below.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl_list;
  tree param_decl;
  tree next_param;

  /* Only save the enclosing function's compilation context when we really
     are nested; the back-end context routines must not be called for a
     top-level subprogram (see the comment at function_nesting_depth).  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level for the body and make it the current
     function.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
     subprogram body) so that they can be recognized as local variables in the
     subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecl'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  /* Unchain each PARM_DECL before pushing it so that pushdecl sees a
     single node rather than the tail of the list.  */
  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes.  They appear in the right order. */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  /* Begin RTL generation for the function body.  */
  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
1860
/* Finish the definition of the current subprogram (started by
   begin_subprog_body, whose binding level and nesting-depth bookkeeping
   this routine undoes) and compile it all the way to assembler language
   output.  */

void
end_subprog_body (void)
{
  tree decl;
  tree cico_list;

  /* Pop the binding level entered in begin_subprog_body; this builds the
     BLOCK that replaces the error_mark_node stored in DECL_INITIAL.  */
  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end ();

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  rest_of_compilation (current_function_decl);

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway.  */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  /* A zero DECL_SAVED_INSNS means the function was not saved for
     inlining, so its RTL is dead after rest_of_compilation.  */
  if (DECL_SAVED_INSNS (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory.  */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value.  */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it.  */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1928 \f
1929 /* Return a definition for a builtin function named NAME and whose data type
1930 is TYPE. TYPE should be a function type with argument types.
1931 FUNCTION_CODE tells later passes how to compile calls to this function.
1932 See tree.h for its possible values.
1933
1934 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1935 the name to be called if we can't opencode the function. If
1936 ATTRS is nonzero, use that for the function attribute list. */
1937
1938 tree
1939 builtin_function (const char *name,
1940 tree type,
1941 int function_code,
1942 enum built_in_class class,
1943 const char *library_name,
1944 tree attrs)
1945 {
1946 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1947
1948 DECL_EXTERNAL (decl) = 1;
1949 TREE_PUBLIC (decl) = 1;
1950 if (library_name)
1951 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1952
1953 pushdecl (decl);
1954 DECL_BUILT_IN_CLASS (decl) = class;
1955 DECL_FUNCTION_CODE (decl) = function_code;
1956 if (attrs)
1957 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1958 return decl;
1959 }
1960
1961 /* Return an integer type with the number of bits of precision given by
1962 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1963 it is a signed type. */
1964
1965 tree
1966 gnat_type_for_size (unsigned precision, int unsignedp)
1967 {
1968 tree t;
1969 char type_name[20];
1970
1971 if (precision <= 2 * MAX_BITS_PER_WORD
1972 && signed_and_unsigned_types[precision][unsignedp] != 0)
1973 return signed_and_unsigned_types[precision][unsignedp];
1974
1975 if (unsignedp)
1976 t = make_unsigned_type (precision);
1977 else
1978 t = make_signed_type (precision);
1979
1980 if (precision <= 2 * MAX_BITS_PER_WORD)
1981 signed_and_unsigned_types[precision][unsignedp] = t;
1982
1983 if (TYPE_NAME (t) == 0)
1984 {
1985 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1986 TYPE_NAME (t) = get_identifier (type_name);
1987 }
1988
1989 return t;
1990 }
1991
1992 /* Likewise for floating-point types. */
1993
1994 static tree
1995 float_type_for_size (int precision, enum machine_mode mode)
1996 {
1997 tree t;
1998 char type_name[20];
1999
2000 if (float_types[(int) mode] != 0)
2001 return float_types[(int) mode];
2002
2003 float_types[(int) mode] = t = make_node (REAL_TYPE);
2004 TYPE_PRECISION (t) = precision;
2005 layout_type (t);
2006
2007 if (TYPE_MODE (t) != mode)
2008 gigi_abort (414);
2009
2010 if (TYPE_NAME (t) == 0)
2011 {
2012 sprintf (type_name, "FLOAT_%d", precision);
2013 TYPE_NAME (t) = get_identifier (type_name);
2014 }
2015
2016 return t;
2017 }
2018
2019 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2020 an unsigned type; otherwise a signed type is returned. */
2021
2022 tree
2023 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2024 {
2025 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2026 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
2027 else
2028 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2029 }
2030
2031 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2032
2033 tree
2034 gnat_unsigned_type (tree type_node)
2035 {
2036 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2037
2038 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2039 {
2040 type = copy_node (type);
2041 TREE_TYPE (type) = type_node;
2042 }
2043 else if (TREE_TYPE (type_node) != 0
2044 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2045 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2046 {
2047 type = copy_node (type);
2048 TREE_TYPE (type) = TREE_TYPE (type_node);
2049 }
2050
2051 return type;
2052 }
2053
2054 /* Return the signed version of a TYPE_NODE, a scalar type. */
2055
2056 tree
2057 gnat_signed_type (tree type_node)
2058 {
2059 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2060
2061 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2062 {
2063 type = copy_node (type);
2064 TREE_TYPE (type) = type_node;
2065 }
2066 else if (TREE_TYPE (type_node) != 0
2067 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2068 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2069 {
2070 type = copy_node (type);
2071 TREE_TYPE (type) = TREE_TYPE (type_node);
2072 }
2073
2074 return type;
2075 }
2076
2077 /* Return a type the same as TYPE except unsigned or signed according to
2078 UNSIGNEDP. */
2079
2080 tree
2081 gnat_signed_or_unsigned_type (int unsignedp, tree type)
2082 {
2083 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2084 return type;
2085 else
2086 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2087 }
2088 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.  The recursion
   dispatches on the tree-code class of EXP, flipping MAX_P where an
   operation reverses the sense of the bound (negation, subtrahends).  */

tree
max_size (tree exp, int max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants contain no discriminant references.  */
      return exp;

    case 'x':
      /* The only exceptional code handled here is a TREE_LIST: process
	 the value and, recursively, the rest of the list.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (! CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Substitute the extreme value of the discriminant's subtype for
	 the reference, then recurse in case that bound needs work too.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison yields 0 or 1, so its extreme values are known.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      /* Unary, binary, and other expressions: dispatch on arity.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation reverses which extreme of the operand we need.  */
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);
	  else if (code == WITH_RECORD_EXPR)
	    return exp;

	  {
	    /* The subtrahend of a MINUS_EXPR needs the opposite extreme.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Either arm may apply, so take the larger of the two.  */
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }

  /* Any tree code not handled above is unexpected here.  */
  gigi_abort (408);
}
2187 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  The template's fields come in
   (lower bound, upper bound) pairs, one pair per dimension.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Look through a padding or left-justified-modular wrapper record to
     get at the array type itself.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.   Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     Each iteration consumes one bound pair (two template fields) and
     advances either the bound list or the nested array type.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      /* NOTE(review): MIN is converted to the second field's type while
	 being paired with the first field (and vice versa for MAX) below —
	 presumably both fields of a pair share one bound type; confirm.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 surround them with a WITH_RECORD_EXPR giving EXPR as the
	 OBJECT.  */
      if (CONTAINS_PLACEHOLDER_P (min))
	min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
      if (CONTAINS_PLACEHOLDER_P (max))
	max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);

      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  /* The elements were consed in reverse; restore the field order.  */
  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2253 \f
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.

   The numeric DTYPE and CLASS codes below appear to follow the VMS
   descriptor encoding (DSC$ codes) — confirm against the OpenVMS
   calling standard before changing any of them.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Collect the index domains, in reverse order for Fortran-convention
     arrays (column-major) except under the NCA mechanism.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      /* VAX floating-point types are distinguished by their decimal
	 digit count; other integers by their bit size and signedness.  */
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TREE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TREE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TREE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TREE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TREE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types: LENGTH, DTYPE, CLASS, and POINTER.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* The POINTER field's initializer takes the address of a
     PLACEHOLDER_EXPR, to be substituted with the actual object later.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  /* The remaining fields depend on the descriptor mechanism.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* Bounds of a single-dimension array, or zero for a scalar.  */
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  Each dimension's
	 coefficient is the running product of the extents so far.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds, a lower (L) and upper (U) field
	 per dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2543
2544 /* Utility routine for above code to make a field. */
2545
2546 static tree
2547 make_descriptor_field (const char *name, tree type, tree rec_type, tree initial)
2548 {
2549 tree field
2550 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2551
2552 DECL_INITIAL (field) = initial;
2553 return field;
2554 }
2555 \f
2556 /* Build a type to be used to represent an aliased object whose nominal
2557 type is an unconstrained array. This consists of a RECORD_TYPE containing
2558 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2559 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2560 is used to represent an arbitrary unconstrained object. Use NAME
2561 as the name of the record. */
2562
2563 tree
2564 build_unc_object_type (tree template_type, tree object_type, tree name)
2565 {
2566 tree type = make_node (RECORD_TYPE);
2567 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2568 template_type, type, 0, 0, 0, 1);
2569 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2570 type, 0, 0, 0, 1);
2571
2572 TYPE_NAME (type) = name;
2573 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2574 finish_record_type (type,
2575 chainon (chainon (NULL_TREE, template_field),
2576 array_field),
2577 0, 0);
2578
2579 return type;
2580 }
2581 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (ptr == 0 && ref == 0)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it.  After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed.  We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type).  We have to merge the
     qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case: redirect the pointer and
     reference types at NEW_TYPE and re-emit their debug info.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ptr != 0)
	TREE_TYPE (ptr) = new_type;
      TYPE_POINTER_TO (new_type) = ptr;

      if (ref != 0)
	TREE_TYPE (ref) = new_type;
      TYPE_REFERENCE_TO (new_type) = ref;

      if (ptr != 0 && TYPE_NAME (ptr) != 0
	  && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				  global_bindings_p (), 0);
      if (ref != 0 && TYPE_NAME (ref) != 0
	  && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				  global_bindings_p (), 0);
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Graft the new fat pointer's fields onto the old fat-pointer
	 record so existing references pick up the new layout.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point every variant of the fat pointer at the new array type.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record size is the sum of the two field sizes (template
	 followed by data), in both bit and byte forms.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2719 \f
/* Convert a pointer to a constrained array into a pointer to a fat
   pointer.  This involves making or finding a template.  The fat pointer
   has two fields: a pointer to the data and a pointer to the template
   (bounds).  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* The template type is the designated type of the fat pointer's
     second field.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Save EXPR since it is evaluated for both components; then get at
	 the pointed-to record, either directly (ADDR_EXPR) or by
	 dereferencing.  */
      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The record's first field is the template, the second the data.  */
      template = build_component_ref (expr, NULL_TREE, fields, 0);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), 0));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2785 \f
/* Convert to a thin pointer type, TYPE.  The only thing we know how to
   convert is something that is a fat pointer, so convert to it first if
   EXPR is not already a fat pointer.  */
2789
2790 static tree
2791 convert_to_thin_pointer (tree type, tree expr)
2792 {
2793 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2794 expr
2795 = convert_to_fat_pointer
2796 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2797
2798 /* We get the pointer to the data and use a NOP_EXPR to make it the
2799 proper GCC type. */
2800 expr
2801 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2802 expr = build1 (NOP_EXPR, type, expr);
2803
2804 return expr;
2805 }
2806 \f
2807 /* Create an expression whose value is that of EXPR,
2808 converted to type TYPE. The TREE_TYPE of the value
2809 is always TYPE. This function implements all reasonable
2810 conversions; callers should filter out those that are
2811 not permitted by the language being compiled. */
2812
tree
convert (tree type, tree expr)
{
  /* Cache the tree codes of the target and source types; several of the
     special cases below re-dispatch on them.  */
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done. */
  if (type == etype)
    return expr;
  /* If we're converting between two aggregate types that have the same main
     variant, just make a NOP_EXPR. */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (NOP_EXPR, type, expr);
  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one. */
  else if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  convert (type, TREE_OPERAND (expr, 0)),
		  TREE_OPERAND (expr, 1));

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  (The empty
     statement here deliberately falls out of the if-chain so that the
     variable-size padded-to-padded case reaches the code further down.)  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (! TREE_CONSTANT (TYPE_SIZE (type))
	       || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression. */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						   TYPE_FIELDS (etype), 0));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries. */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries. */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion. */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, 0);

      else
	return
	  gnat_build_constructor (type,
			     tree_cons (TYPE_FIELDS (type),
					convert (TREE_TYPE
						 (TYPE_FIELDS (type)),
						 expr),
					NULL_TREE));
    }

  /* If the input is a biased type, adjust first: add the bias (the type's
     minimum value) back in to recover the actual value. */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (GNAT_NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array. */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), 0));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template. */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway. */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially. */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed. */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  Likewise for a
	 CONSTRUCTOR.  But avoid doing so when going from a fixed-size to a
	 variable-size type. */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoids unneeded conversions, which make reference computations
	 more complex. */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
	  && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
	  && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
	return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
		      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template. */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, 0));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Strip a nested view conversion between aggregates (but not fat
	 pointers) and convert the underlying object directly. */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time. */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array. */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
	  && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  /* Finally, dispatch on the target type's code for the general cases. */
  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, 0);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Subtract the bias (the type's minimum value) to produce the
	   biased representation. */
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative. */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially. */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it. */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, 0);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion. */
      return unchecked_convert (type, expr, 0);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion. */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array. */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another. */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3149 \f
3150 /* Remove all conversions that are done in EXP. This includes converting
3151 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3152 is nonzero, always return the address of the containing object even if
3153 the address is not bit-aligned. */
3154
3155 tree
3156 remove_conversions (tree exp, int true_address)
3157 {
3158 switch (TREE_CODE (exp))
3159 {
3160 case CONSTRUCTOR:
3161 if (true_address
3162 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3163 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3164 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3165 break;
3166
3167 case COMPONENT_REF:
3168 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3169 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3170 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3171 break;
3172
3173 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3174 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3175 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3176
3177 default:
3178 break;
3179 }
3180
3181 return exp;
3182 }
3183 \f
3184 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3185 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3186 likewise return an expression pointing to the underlying array. */
3187
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the P_ARRAY pointer of the fat pointer to get at
	     the underlying array object, propagating readonly-ness.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, 0));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR with the underlying array type, obtained
	   from the pointed-to type of the fat pointer's first field.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      else if (code == WITH_RECORD_EXPR
	       && (TREE_OPERAND (exp, 0)
		   != (new = maybe_unconstrained_array
		       (TREE_OPERAND (exp, 0)))))
	/* Process inside the WITH_RECORD_EXPR; rewrap only if the inner
	   expression actually changed.  */
	return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
		      TREE_OPERAND (exp, 1));

      /* NOTE(review): there is no break here, so any other expression of
	 unconstrained array type falls through into the RECORD_TYPE
	 handling below -- confirm this fallthrough is intentional.  */
    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template. */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    /* The array is the second field, after the template.  */
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}
3246 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is set, truncation operations should be suppressed.  */
3249
tree
unchecked_convert (tree type, tree expr, int notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done. */
  if (etype == type)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one. */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  unchecked_convert (type, TREE_OPERAND (expr, 0), notrunc_p),
		  TREE_OPERAND (expr, 1));

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array. */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* Strip a biased representation from the input by converting
	 through an unbiased copy of its type.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      /* Likewise convert to an unbiased copy of a biased target type;
	 the result is reinterpreted as the biased type afterwards.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field. */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      /* Build a one-field record on the fly to wrap the output type.  */
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input types whose precision is not equal to
     their size: wrap the input in a one-field record first. */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference. */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      /* General case: view-convert, after exposing the underlying array
	 if EXPR is an unconstrained array reference. */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned. */
  if (! notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
    {
      /* Shift left to discard the bits above the RM size, then shift
	 right (arithmetically for signed TYPE, logically for unsigned,
	 per the signedness of BASE_TYPE) to extend the top bit.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TREE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway. */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant. */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3411
3412 #include "gt-ada-utils.h"
3413 #include "gtype-ada.h"