/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *                                                                          *
 *          Copyright (C) 1992-2002, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 2,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have  received  a copy of the GNU General *
 * Public License  distributed with GNAT;  see file COPYING.  If not, write *
 * to  the Free Software Foundation,  59 Temple Place - Suite 330,  Boston, *
 * MA 02111-1307, USA.                                                      *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "tree.h"
#include "flags.h"
#include "defaults.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

struct e_stack GTY(()) {
  struct e_stack *next;
  tree elab_list;
};
static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

      the global one
      one for each subprogram definition
      one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level GTY(())
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;
  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;
  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;
  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};

/* The binding level currently in effect.  */
static GTY(()) struct binding_level *current_binding_level;

/* A chain of binding_level structures awaiting reuse.  */
static GTY((deletable (""))) struct binding_level *free_binding_level;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};

struct language_function GTY(())
{
  int unused;
};

static tree merge_sizes                 PARAMS ((tree, tree, tree, int, int));
static tree compute_related_constant    PARAMS ((tree, tree));
static tree split_plus                  PARAMS ((tree, tree *));
static int value_zerop                  PARAMS ((tree));
static tree float_type_for_size         PARAMS ((int, enum machine_mode));
static tree convert_to_fat_pointer      PARAMS ((tree, tree));
static tree convert_to_thin_pointer     PARAMS ((tree, tree));
static tree make_descriptor_field       PARAMS ((const char *,tree, tree,
                                                 tree));
\f
/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu ()
{
  Node_Id gnat_node;

  associate_gnat_to_gnu = (tree *) ggc_alloc (max_gnat_nodes * sizeof (tree));

  for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
    associate_gnat_to_gnu[gnat_node] = NULL_TREE;

  pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  GNU_DECL is the GCC tree
   which is to be associated with GNAT_ENTITY.  Such GCC tree node is always
   a ..._DECL node.  If NO_CHECK is nonzero, the latter check is suppressed.

   If GNU_DECL is zero, a previous association is to be reset.  */

void
save_gnu_tree (gnat_entity, gnu_decl, no_check)
     Entity_Id gnat_entity;
     tree gnu_decl;
     int no_check;
{
  if (gnu_decl
      && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
          || (! no_check && ! DECL_P (gnu_decl))))
    gigi_abort (401);

  associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
}

/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
   Return the ..._DECL node that was associated with it.  If there is no tree
   node associated with GNAT_ENTITY, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (gnat_entity)
     Entity_Id gnat_entity;
{
  if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
    gigi_abort (402);

  return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

int
present_gnu_tree (gnat_entity)
     Entity_Id gnat_entity;
{
  return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
}
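
/* Illustrative usage sketch (added for clarity; not part of the original
   source).  A caller in gigi typically records the GCC DECL for a GNAT
   entity once, then tests for and fetches it on later references.
   GNAT_ENTITY and GNU_DECL below are hypothetical placeholders:

     save_gnu_tree (gnat_entity, gnu_decl, 0);
     ...
     if (present_gnu_tree (gnat_entity))
       gnu_decl = get_gnu_tree (gnat_entity);  */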

\f
/* Return non-zero if we are currently in the global binding level.  */

int
global_bindings_p ()
{
  return (force_global != 0 || current_binding_level == global_binding_level
          ? -1 : 0);
}

/* Return the list of declarations in the current level.  Note that this list
   is in reverse order (it has to be so for back-end compatibility).  */

tree
getdecls ()
{
  return current_binding_level->names;
}

/* Nonzero if the current level needs to have a BLOCK made.  */

int
kept_level_p ()
{
  return (current_binding_level->names != 0);
}

/* Enter a new binding level.  The input parameter is ignored, but has to be
   specified for back-end compatibility.  */

void
pushlevel (ignore)
     int ignore ATTRIBUTE_UNUSED;
{
  struct binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->level_chain;
    }
  else
    newlevel
      = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));

  *newlevel = clear_binding_level;

  /* Add this level to the front of the chain (stack) of levels that are
     active.  */
  newlevel->level_chain = current_binding_level;
  current_binding_level = newlevel;
}

/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so create a
   "block" (a BLOCK node) for the level to record its declarations and
   subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.  */

tree
poplevel (keep, reverse, functionbody)
     int keep;
     int reverse;
     int functionbody;
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed for
     the binding level that we are about to exit and which is returned by this
     routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL nodes, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
        && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
        && DECL_INITIAL (decl_node) != 0)
      {
        push_function_context ();
        output_inline_function (decl_node);
        pop_function_context ();
      }

  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock of its enclosing
     scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
         don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
        {
          if (TREE_USED (subblock_node))
            TREE_USED (DECL_NAME (subblock_node)) = 1;
          if (TREE_ADDRESSABLE (subblock_node))
            TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
        }

  {
    /* Pop the current level, and free the structure for reuse.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
         in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
         leave them in the BLOCK because they are found in the FUNCTION_DECL
         instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      if (!block_previously_created)
        current_binding_level->blocks
          = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
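
/* Illustrative sketch (added for clarity; not part of the original source).
   gigi brackets the translation of a declarative region roughly as

     pushlevel (0);
     ... pushdecl each local ..._DECL ...
     block = poplevel (kept_level_p (), 1, 0);

   The exact arguments are placeholders; the real callers live elsewhere in
   gigi (e.g. decl.c and trans.c).  */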
\f
/* Insert BLOCK at the end of the list of subblocks of the
   current binding level.  This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

void
insert_block (block)
     tree block;
{
  TREE_USED (block) = 1;
  current_binding_level->blocks
    = chainon (current_binding_level->blocks, block);
}

/* Set the BLOCK node for the innermost scope
   (the one we are currently in).  */

void
set_block (block)
     tree block;
{
  current_binding_level->this_block = block;
  current_binding_level->names = chainon (current_binding_level->names,
                                          BLOCK_VARS (block));
  current_binding_level->blocks = chainon (current_binding_level->blocks,
                                           BLOCK_SUBBLOCKS (block));
}

/* Records a ..._DECL node DECL as belonging to the current lexical scope.
   Returns the ..._DECL node.  */

tree
pushdecl (decl)
     tree decl;
{
  struct binding_level *b;

  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of their function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    {
      b = global_binding_level;
      DECL_CONTEXT (decl) = 0;
    }
  else
    {
      b = current_binding_level;
      DECL_CONTEXT (decl) = current_function_decl;
    }

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later if necessary.  This needs to be
     this way for compatibility with the back-end.

     Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list.  They
     will cause trouble with the debugger and aren't needed anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TREE_CHAIN (decl) = b->names;
      b->names = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  The
     corresponding code in c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types, so let's not try it
     (at least for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
          || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
          || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
              && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
              && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  return decl;
}
\f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing ()
{
  lineno = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of ptr_mode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this into the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
                        integer_type_node));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
                        char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}

/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.  */

void
init_gigi_decls (long_long_float_type, exception_type)
     tree long_long_float_type, exception_type;
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
         so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
                            longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
                        unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
                            void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
                                     NULL_TREE,
                                     build_function_type (ptr_void_type_node,
                                                          tree_cons (NULL_TREE,
                                                                     sizetype,
                                                                     endlink)),
                                     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */

  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
                        build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       NULL_TREE,
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (except_type_node),
                                       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
        = create_subprog_decl
          (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
           build_function_type (void_type_node,
                                tree_cons (NULL_TREE,
                                           build_pointer_type (char_type_node),
                                           tree_cons (NULL_TREE,
                                                      integer_type_node,
                                                      endlink))),
           NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0];
           i++)
        gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
      {
        char name[17];

        sprintf (name, "__gnat_rcheck_%.2d", i);
        gnat_raise_decls[i]
          = create_subprog_decl
            (get_identifier (name), NULL_TREE,
             build_function_type (void_type_node,
                                  tree_cons (NULL_TREE,
                                             build_pointer_type
                                             (char_type_node),
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
             NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
                            TYPE_QUAL_VOLATILE);

  for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
        = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
                                TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");
}
\f
/* This function is called indirectly from toplev.c to handle incomplete
   declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero.  To be precise,
   compile_file in toplev.c makes an indirect call through the function pointer
   incomplete_decl_finalize_hook which is initialized to this routine in
   init_decl_processing.  */

void
gnat_finish_incomplete_decl (dont_care)
     tree dont_care ATTRIBUTE_UNUSED;
{
  gigi_abort (405);
}
\f
/* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
   nodes (FIELDLIST), finish constructing the record or union type.
   If HAS_REP is nonzero, this record has a rep clause; don't call
   layout_type but merely set the size and alignment ourselves.
   If DEFER_DEBUG is nonzero, do not call the debugging routines
   on this type; it will be done later.  */

void
finish_record_type (record_type, fieldlist, has_rep, defer_debug)
     tree record_type;
     tree fieldlist;
     int has_rep;
     int defer_debug;
{
  enum tree_code code = TREE_CODE (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  tree size_unit = size_zero_node;
  int var_size = 0;
  tree field;

  TYPE_FIELDS (record_type) = fieldlist;

  if (TYPE_NAME (record_type) != 0
      && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
    TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
  else
    TYPE_STUB_DECL (record_type)
      = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
                              record_type));

  /* We don't need both the typedef name and the record name output in
     the debugging information, since they are the same.  */
  DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */

  if (has_rep)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      TYPE_MODE (record_type) = BLKmode;
      if (TYPE_SIZE (record_type) == 0)
        {
          TYPE_SIZE (record_type) = bitsize_zero_node;
          TYPE_SIZE_UNIT (record_type) = size_zero_node;
        }
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
         case where there is a rep clause but all fields have errors and
         no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type
     above.  We now make a pass through the fields (in reverse order for
     QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
     (for rep'ed records that are not padding types); and the mode (for
     rep'ed records).  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree this_size = DECL_SIZE (field);
      tree this_size_unit = DECL_SIZE_UNIT (field);
      tree this_ada_size = DECL_SIZE (field);

      /* We need to make an XVE/XVU record if any field has variable size,
         whether or not the record does.  For example, if we have a union,
         it may be that all fields, rounded up to the alignment, have the
         same size, in which case we'll use that size.  But the debug
         output routines (except Dwarf2) won't be able to output the fields,
         so we need to make the special record.  */
      if (TREE_CODE (this_size) != INTEGER_CST)
        var_size = 1;

      if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
           || TREE_CODE (type) == QUAL_UNION_TYPE)
          && ! TYPE_IS_FAT_POINTER_P (type)
          && ! TYPE_CONTAINS_TEMPLATE_P (type)
          && TYPE_ADA_SIZE (type) != 0)
        this_ada_size = TYPE_ADA_SIZE (type);

      if (has_rep && ! DECL_BIT_FIELD (field))
        TYPE_ALIGN (record_type)
          = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
        {
        case UNION_TYPE:
          ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
          size = size_binop (MAX_EXPR, size, this_size);
          size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
          break;

        case QUAL_UNION_TYPE:
          ada_size
            = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                           this_ada_size, ada_size));
          size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
                              this_size, size));
          size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
                                   this_size_unit, size_unit));
          break;

        case RECORD_TYPE:
          /* Since we know here that all fields are sorted in order of
             increasing bit position, the size of the record is one
             higher than the ending bit of the last field processed
             unless we have a rep clause, since in that case we might
             have a field outside a QUAL_UNION_TYPE that has a higher ending
             position.  So use a MAX in that case.  Also, if this field is a
             QUAL_UNION_TYPE, we need to take into account the previous size in
             the case of empty variants.  */
          ada_size
            = merge_sizes (ada_size, bit_position (field), this_ada_size,
                           TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          size = merge_sizes (size, bit_position (field), this_size,
                              TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          size_unit
            = merge_sizes (size_unit, byte_position (field), this_size_unit,
                           TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
          break;

        default:
          abort ();
        }
    }

  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  /* If this is a padding record, we never want to make the size smaller than
     what was specified in it, if any.  */
  if (TREE_CODE (record_type) == RECORD_TYPE
      && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
    {
      size = TYPE_SIZE (record_type);
      size_unit = TYPE_SIZE_UNIT (record_type);
    }

  /* Now set any of the values we've just computed that apply.  */
  if (! TYPE_IS_FAT_POINTER_P (record_type)
      && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
    SET_TYPE_ADA_SIZE (record_type, ada_size);

#ifdef ROUND_TYPE_SIZE
  size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
  size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
                                    TYPE_ALIGN (record_type) / BITS_PER_UNIT);
#else
  size = round_up (size, TYPE_ALIGN (record_type));
  size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
#endif

  if (has_rep
      && ! (TREE_CODE (record_type) == RECORD_TYPE
            && TYPE_IS_PADDING_P (record_type)
            && TREE_CODE (size) != INTEGER_CST
            && contains_placeholder_p (size)))
    {
      TYPE_SIZE (record_type) = size;
      TYPE_SIZE_UNIT (record_type) = size_unit;
    }

  if (has_rep)
    compute_record_mode (record_type);

  if (! defer_debug)
    {
      /* If this record is of variable size, rename it so that the
         debugger knows it is and make a new, parallel, record
         that tells the debugger how the record is laid out.  See
         exp_dbug.ads.  */
      if (var_size)
        {
          tree new_record_type
            = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
                         ? UNION_TYPE : TREE_CODE (record_type));
          tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
          tree new_id
            = concat_id_with_name (orig_id,
                                   TREE_CODE (record_type) == QUAL_UNION_TYPE
                                   ? "XVU" : "XVE");
          tree last_pos = bitsize_zero_node;
          tree old_field;

          TYPE_NAME (new_record_type) = new_id;
          TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
          TYPE_STUB_DECL (new_record_type)
            = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
          DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
          DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
            = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
          TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));

          /* Now scan all the fields, replacing each field with a new
             field corresponding to the new encoding.  */
          for (old_field = TYPE_FIELDS (record_type); old_field != 0;
               old_field = TREE_CHAIN (old_field))
            {
              tree field_type = TREE_TYPE (old_field);
              tree field_name = DECL_NAME (old_field);
              tree new_field;
              tree curpos = bit_position (old_field);
              int var = 0;
              unsigned int align = 0;
              tree pos;

              /* See how the position was modified from the last position.

                 There are two basic cases we support: a value was added
                 to the last position or the last position was rounded to
                 a boundary and then something was added.  Check for the
                 first case first.  If not, see if there is any evidence
                 of rounding.  If so, round the last position and try
                 again.

                 If this is a union, the position can be taken as zero.  */

              if (TREE_CODE (new_record_type) == UNION_TYPE)
                pos = bitsize_zero_node, align = 0;
              else
                pos = compute_related_constant (curpos, last_pos);

              if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
                  && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
                {
                  align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
                  pos = compute_related_constant (curpos,
                                                  round_up (last_pos, align));
                }
              else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
                       && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
                       && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
                       && host_integerp (TREE_OPERAND
                                         (TREE_OPERAND (curpos, 0), 1),
                                         1))
                {
                  align
                    = tree_low_cst
                      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
                  pos = compute_related_constant (curpos,
                                                  round_up (last_pos, align));
                }

              /* If we can't compute a position, set it to zero.

                 ??? We really should abort here, but it's too much work
                 to get this correct for all cases.  */

              if (pos == 0)
                pos = bitsize_zero_node;

              /* See if this type is variable-size and make a new type
                 and indicate the indirection if so.  */
              if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
                {
                  field_type = build_pointer_type (field_type);
                  var = 1;
                }

              /* Make a new field name, if necessary.  */
              if (var || align != 0)
                {
                  char suffix[6];

                  if (align != 0)
                    sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
                             align / BITS_PER_UNIT);
                  else
                    strcpy (suffix, "XVL");

                  field_name = concat_id_with_name (field_name, suffix);
                }

              new_field = create_field_decl (field_name, field_type,
                                             new_record_type, 0,
                                             DECL_SIZE (old_field), pos, 0);
              TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
              TYPE_FIELDS (new_record_type) = new_field;

              /* If old_field is a QUAL_UNION_TYPE, take its size as being
                 zero.  The only time it's not the last field of the record
                 is when there are other components at fixed positions after
                 it (meaning there was a rep clause for every field) and we
                 want to be able to encode them.  */
              last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
                                     (TREE_CODE (TREE_TYPE (old_field))
                                      == QUAL_UNION_TYPE)
                                     ? bitsize_zero_node
                                     : DECL_SIZE (old_field));
            }

          TYPE_FIELDS (new_record_type)
            = nreverse (TYPE_FIELDS (new_record_type));

          rest_of_type_compilation (new_record_type, global_bindings_p ());
        }

      rest_of_type_compilation (record_type, global_bindings_p ());
    }
}
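
/* Illustrative sketch (added for clarity; not part of the original source).
   A record type is typically built by creating its FIELD_DECLs and then
   finishing the type, roughly

     tree field = create_field_decl (get_identifier ("f"), integer_type_node,
                                     record, 0, NULL_TREE, NULL_TREE, 0);
     finish_record_type (record, field, 0, 0);

   Here `record' stands for a RECORD_TYPE node made elsewhere with make_node;
   the field name and type are placeholders.  */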

/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (last_size, first_bit, size, special, has_rep)
     tree last_size;
     tree first_bit, size;
     int special;
     int has_rep;
{
  tree type = TREE_TYPE (last_size);

  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      tree new = size_binop (PLUS_EXPR, first_bit, size);

      if (has_rep)
        new = size_binop (MAX_EXPR, last_size, new);

      return new;
    }

  return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
                      integer_zerop (TREE_OPERAND (size, 1))
                      ? last_size : merge_sizes (last_size, first_bit,
                                                 TREE_OPERAND (size, 1),
                                                 1, has_rep),
                      integer_zerop (TREE_OPERAND (size, 2))
                      ? last_size : merge_sizes (last_size, first_bit,
                                                 TREE_OPERAND (size, 2),
                                                 1, has_rep)));
}
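
/* Worked example (added for clarity; not part of the original source): for
   an ordinary record field at FIRST_BIT = 32 with SIZE = 16 and no rep
   clause, the new running size is simply 32 + 16 = 48 bits.  With HAS_REP
   set and a previous LAST_SIZE of 64, the result is MAX (64, 48) = 64,
   since a rep clause may place fields out of order.  */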

/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so.  */

static tree
compute_related_constant (op0, op1)
     tree op0, op1;
{
  tree op0_var, op1_var;
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}

/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be size_zero_node.  All operations must be of
   sizetype.  */

static tree
split_plus (in, pvar)
     tree in;
     tree *pvar;
{
  tree result = bitsize_zero_node;

  while (TREE_CODE (in) == NON_LVALUE_EXPR)
    in = TREE_OPERAND (in, 0);

  *pvar = in;
  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return in;
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      result = size_binop (PLUS_EXPR, result, lhs_con);
      result = size_binop (TREE_CODE (in), result, rhs_con);

      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return result;
    }
  else
    return bitsize_zero_node;
}
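
/* Worked example (added for clarity; not part of the original source): given
   positions N + 8 and N for some variable expression N, split_plus yields
   constant parts 8 and 0 with the same variable part N, so
   compute_related_constant returns 8; given N + 8 and M + 8 with distinct
   variable parts it returns 0, meaning no constant relation was found.  */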
\f
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
   copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
   object.  RETURNS_BY_REF is nonzero if the function returns by reference.
   RETURNS_WITH_DSP is nonzero if the function is to return with a
   depressed stack pointer.  */

tree
create_subprog_type (return_type, param_decl_list, cico_list,
                     returns_unconstrained, returns_by_ref, returns_with_dsp)
     tree return_type;
     tree param_decl_list;
     tree cico_list;
     int returns_unconstrained, returns_by_ref, returns_with_dsp;
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing
     the input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;
  tree param_decl;
  tree type;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
                                 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse
     so nreverse it.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a
     CICO_LIST or the new type should have one, make a copy of TYPE.
     Likewise for RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
    type = copy_type (type);

  SET_TYPE_CI_CO_LIST (type, cico_list);
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
  return type;
}
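
/* Illustrative sketch (added for clarity; not part of the original source).
   A parameterless Ada procedure maps roughly onto

     tree type = create_subprog_type (void_type_node, NULL_TREE, NULL_TREE,
                                      0, 0, 0);

   i.e. a FUNCTION_TYPE returning void with no copy-in/copy-out list and
   none of the return flags set.  */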
\f
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (type)
     tree type;
{
  tree new = copy_node (type);

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);

  TYPE_POINTER_TO (new) = 0;
  TYPE_REFERENCE_TO (new) = 0;
  TYPE_MAIN_VARIANT (new) = new;
  TYPE_NEXT_VARIANT (new) = 0;

  return new;
}
\f
/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  */

tree
create_index_type (min, max, index)
     tree min, max;
     tree index;
{
  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if it
     doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
     is set, but not to INDEX, make a copy of this type with the requested
     index type.  Note that we have no way of sharing these types, but that's
     only a small hole.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;
  else if (TYPE_INDEX_TYPE (type) != 0)
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  return type;
}
\f
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.
   ARTIFICIAL_P is nonzero if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is nonzero if we need to write debugging
   information about this type.  */

tree
create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
     tree type_name;
     tree type;
     struct attrib *attr_list;
     int artificial_p;
     int debug_info_p;
{
  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  pushdecl (type_decl);
  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     an ENUMERAL_TYPE or RECORD_TYPE, which are handled separately,
     a dummy type, which will be completed later, or a type for which
     debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
      || ! debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
           && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
                 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);

  return type_decl;
}

/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.  */

tree
create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
                 extern_flag, static_flag, attr_list)
     tree var_name;
     tree asm_name;
     tree type;
     tree var_init;
     int const_flag;
     int public_flag;
     int extern_flag;
     int static_flag;
     struct attrib *attr_list;
{
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
          && (global_bindings_p () || static_flag
              ? 0 != initializer_constant_valid_p (var_init,
                                                   TREE_TYPE (var_init))
              : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
                   /* Only make a CONST_DECL for sufficiently-small objects.
                      We consider a complex double "sufficiently small".  */
                   && TYPE_SIZE (type) != 0
                   && host_integerp (TYPE_SIZE_UNIT (type), 1)
                   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
                                             GET_MODE_SIZE (DCmode)))
                  ? CONST_DECL : VAR_DECL, var_name, type);
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  else if (var_init != 0
           && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
                != TYPE_MAIN_VARIANT (type))
               || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top
     level we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
         type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
        lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
                                         assign_init));
    }

  return var_decl;
}
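
/* Illustrative sketch (added for clarity; not part of the original source).
   A library-level Ada variable "X : Integer := 0;" would reach the back end
   roughly as

     create_var_decl (get_identifier ("x"), NULL_TREE, integer_type_node,
                      integer_zero_node, 0, 1, 0, 0, NULL);

   The identifier and flag values are placeholders; the real calls are made
   elsewhere in gigi with flags derived from the GNAT entity.  */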
\f
/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (field_name, field_type, record_type, packed, size, pos,
                   addressable)
     tree field_name;
     tree field_type;
     tree record_type;
     int packed;
     tree size, pos;
     int addressable;
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     If it is a padding type where the inner field is of variable size, it
     must be at its natural alignment.  Just handle the packed case here; we
     will disallow non-aligned rep clauses elsewhere.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl)
      = ((TREE_CODE (field_type) == RECORD_TYPE
          && TYPE_IS_PADDING_P (field_type)
          && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
         ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);

  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed)
    {
      if (packed == 1 && ! operand_equal_p (rm_size (field_type),
                                            TYPE_SIZE (field_type), 0))
        size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
         a byte.  */
      if (size != 0 && TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
          || (pos != 0
              && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
                                            bitsize_int (TYPE_ALIGN
                                                         (field_type)))))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (! packed && pos == 0)
        DECL_ALIGN (field_decl)
          = (TYPE_ALIGN (record_type) != 0
             ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
             : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
           DECL_BIT_FIELD (field_decl) ? 1
           : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
           : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);

  return field_decl;
}
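
/* Worked example (added for clarity; not part of the original source): a
   field whose type has a natural size of 32 bits but which is given a
   SIZE of 24 bits becomes a DECL_BIT_FIELD with DECL_SIZE of 24, because
   the specified size differs from the natural size of the type; the same
   field left at 32 bits and suitably aligned stays an ordinary
   FIELD_DECL.  */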

/* Subroutine of previous function: return nonzero if EXP, ignoring any side
   effects, has the value of zero.  */

static int
value_zerop (exp)
     tree exp;
{
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    return value_zerop (TREE_OPERAND (exp, 1));

  return integer_zerop (exp);
}
\f
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

tree
create_param_decl (param_name, param_type, readonly)
     tree param_name;
     tree param_type;
     int readonly;
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
\f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (decl, attr_list)
     tree decl;
     struct attrib *attr_list;
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
        decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
                                           NULL_TREE),
                         ATTR_FLAG_TYPE_IN_PLACE);
        break;

      case ATTR_LINK_ALIAS:
        TREE_STATIC (decl) = 1;
        assemble_alias (decl, attr_list->name);
        break;

      case ATTR_WEAK_EXTERNAL:
        if (SUPPORTS_WEAK)
          declare_weak (decl);
        else
          post_error ("?weak declarations not supported on this target",
                      attr_list->error_point);
        break;

      case ATTR_LINK_SECTION:
#ifdef ASM_OUTPUT_SECTION_NAME
        DECL_SECTION_NAME (decl)
          = build_string (IDENTIFIER_LENGTH (attr_list->name),
                          IDENTIFIER_POINTER (attr_list->name));
        DECL_COMMON (decl) = 0;
#else
        post_error ("?section attributes are not supported for this target",
                    attr_list->error_point);
#endif
        break;
      }
}
1568 \f
1569 /* Add some pending elaborations on the list. */
1570
1571 void
1572 add_pending_elaborations (var_decl, var_init)
1573 tree var_decl;
1574 tree var_init;
1575 {
1576 if (var_init != 0)
1577 Check_Elaboration_Code_Allowed (error_gnat_node);
1578
1579 pending_elaborations
1580 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1581 }
1582
1583 /* Obtain any pending elaborations and clear the old list. */
1584
1585 tree
1586 get_pending_elaborations ()
1587 {
1588 /* Each thing added to the list went on the end; we want it on the
1589 beginning. */
1590 tree result = TREE_CHAIN (pending_elaborations);
1591
1592 TREE_CHAIN (pending_elaborations) = 0;
1593 return result;
1594 }
1595
1596 /* Return nonzero if there are pending elaborations. */
1597
1598 int
1599 pending_elaborations_p ()
1600 {
1601 return TREE_CHAIN (pending_elaborations) != 0;
1602 }
1603
1604 /* Save a copy of the current pending elaboration list and make a new
1605 one. */
1606
1607 void
1608 push_pending_elaborations ()
1609 {
1610 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1611
1612 p->next = elist_stack;
1613 p->elab_list = pending_elaborations;
1614 elist_stack = p;
1615 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1616 }
1617
1618 /* Pop the stack of pending elaborations. */
1619
1620 void
1621 pop_pending_elaborations ()
1622 {
1623 struct e_stack *p = elist_stack;
1624
1625 pending_elaborations = p->elab_list;
1626 elist_stack = p->next;
1627 }
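
/* Illustrative sketch only, kept out of the build: the push/pop pair
   above brackets the processing of an inner context so that any
   elaborations generated there do not land on the outer list.  */
#if 0
{
  push_pending_elaborations ();
  /* ... process the declarations of the inner context; calls to
     add_pending_elaborations go onto the fresh list ...  */
  if (pending_elaborations_p ())
    {
      tree gnu_inner_elabs = get_pending_elaborations ();
      /* ... emit or record GNU_INNER_ELABS for the inner context ...  */
    }
  pop_pending_elaborations ();
}
#endif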
1628
1629 /* Return the current position in pending_elaborations so we can insert
1630 elaborations after that point. */
1631
1632 tree
1633 get_elaboration_location ()
1634 {
1635 return tree_last (pending_elaborations);
1636 }
1637
1638 /* Insert the current elaborations after ELAB, which is in some elaboration
1639 list. */
1640
1641 void
1642 insert_elaboration_list (elab)
1643 tree elab;
1644 {
1645 tree next = TREE_CHAIN (elab);
1646
1647 if (TREE_CHAIN (pending_elaborations))
1648 {
1649 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1650 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1651 TREE_CHAIN (pending_elaborations) = 0;
1652 }
1653 }
1654
1655 /* Returns a LABEL_DECL node for LABEL_NAME. */
1656
1657 tree
1658 create_label_decl (label_name)
1659 tree label_name;
1660 {
1661 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1662
1663 DECL_CONTEXT (label_decl) = current_function_decl;
1664 DECL_MODE (label_decl) = VOIDmode;
1665 DECL_SOURCE_LINE (label_decl) = lineno;
1666 DECL_SOURCE_FILE (label_decl) = input_filename;
1667
1668 return label_decl;
1669 }
1670 \f
1671 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1672 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1673 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1674 PARM_DECL nodes chained through the TREE_CHAIN field).
1675
1676 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1677 appropriate fields in the FUNCTION_DECL. */
1678
1679 tree
1680 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1681 inline_flag, public_flag, extern_flag, attr_list)
1682 tree subprog_name;
1683 tree asm_name;
1684 tree subprog_type;
1685 tree param_decl_list;
1686 int inline_flag;
1687 int public_flag;
1688 int extern_flag;
1689 struct attrib *attr_list;
1690 {
1691 tree return_type = TREE_TYPE (subprog_type);
1692 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1693
1694 /* If this is a function nested inside an inlined external function, it
1695 means we aren't going to compile the outer function unless it is
1696 actually inlined, so do the same for us. */
1697 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1698 && DECL_EXTERNAL (current_function_decl))
1699 extern_flag = 1;
1700
1701 DECL_EXTERNAL (subprog_decl) = extern_flag;
1702 TREE_PUBLIC (subprog_decl) = public_flag;
1703 DECL_INLINE (subprog_decl) = inline_flag;
1704 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1705 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1706 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1707 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1708 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1709
1710 if (asm_name != 0)
1711 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1712
1713 process_attributes (subprog_decl, attr_list);
1714
1715 /* Add this decl to the current binding level. */
1716 subprog_decl = pushdecl (subprog_decl);
1717
1718 /* Output the assembler code and/or RTL for the declaration. */
1719 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1720
1721 return subprog_decl;
1722 }
1723 \f
1724 /* Count how deep we are into nested functions. We must not call the
1725 back-end function context routines unless we are actually inside a
1726 nested function. */
1727
1728 static int function_nesting_depth;
1729
1730 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1731 body. This routine needs to be invoked before processing the declarations
1732 appearing in the subprogram. */
1733
1734 void
1735 begin_subprog_body (subprog_decl)
1736 tree subprog_decl;
1737 {
1738 tree param_decl_list;
1739 tree param_decl;
1740 tree next_param;
1741
1742 if (function_nesting_depth++ != 0)
1743 push_function_context ();
1744
1745 announce_function (subprog_decl);
1746
1747 /* Make this field nonzero so further routines know that this is not
1748 tentative. error_mark_node is replaced below (in poplevel) with the
1749 adequate BLOCK. */
1750 DECL_INITIAL (subprog_decl) = error_mark_node;
1751
1752 /* This function exists in static storage. This does not mean `static' in
1753 the C sense! */
1754 TREE_STATIC (subprog_decl) = 1;
1755
1756 /* Enter a new binding level. */
1757 current_function_decl = subprog_decl;
1758 pushlevel (0);
1759
1760 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1761 subprogram body) so that they can be recognized as local variables in the
1762 subprogram.
1763
1764 The list of PARM_DECL nodes is stored in the right order in
1765 DECL_ARGUMENTS. Since ..._DECL nodes get stored by `pushdecl' in the
1766 reverse order in which they are transmitted to it, we need to reverse
1767 the list of PARM_DECLs here if we want it to end up in the right order.
1768 Keeping the PARM_DECLs in the correct order matters because this list
1769 is retrieved a few lines below with a call to `getdecls' and stored
1770 back into the DECL_ARGUMENTS field. */
1771 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1772
1773 for (param_decl = param_decl_list; param_decl; param_decl = next_param)
1774 {
1775 next_param = TREE_CHAIN (param_decl);
1776 TREE_CHAIN (param_decl) = NULL;
1777 pushdecl (param_decl);
1778 }
1779
1780 /* Store back the PARM_DECL nodes. They appear in the right order. */
1781 DECL_ARGUMENTS (subprog_decl) = getdecls ();
1782
1783 init_function_start (subprog_decl, input_filename, lineno);
1784 expand_function_start (subprog_decl, 0);
1785
1786 /* If this function is `main', emit a call to `__main'
1787 to run global initializers, etc. */
1788 if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
1789 && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
1790 && DECL_CONTEXT (subprog_decl) == NULL_TREE)
1791 expand_main_function ();
1792 }
1793
1794 /* Finish the definition of the current subprogram and compile it all the way
1795 to assembler language output. */
1796
1797 void
1798 end_subprog_body ()
1799 {
1800 tree decl;
1801 tree cico_list;
1802
1803 poplevel (1, 0, 1);
1804 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1805 = current_function_decl;
1806
1807 /* Mark the RESULT_DECL as being in this subprogram. */
1808 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1809
1810 expand_function_end (input_filename, lineno, 0);
1811
1812 /* If this is a nested function, push a new GC context. That will keep
1813 local variables on the stack from being collected while we're doing
1814 the compilation of this function. */
1815 if (function_nesting_depth > 1)
1816 ggc_push_context ();
1817
1818 rest_of_compilation (current_function_decl);
1819
1820 if (function_nesting_depth > 1)
1821 ggc_pop_context ();
1822
1823 #if 0
1824 /* If we're sure this function is defined in this file then mark it
1825 as such */
1826 if (TREE_ASM_WRITTEN (current_function_decl))
1827 mark_fn_defined_in_this_file (current_function_decl);
1828 #endif
1829
1830 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1831 not be seen when we call this function and will be in
1832 unallocated memory anyway. */
1833 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1834 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1835 TREE_VALUE (cico_list) = 0;
1836
1837 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1838 {
1839 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1840 was saved for inline, in which case the DECL_RTLs are in
1841 preserved memory. */
1842 for (decl = DECL_ARGUMENTS (current_function_decl);
1843 decl != 0; decl = TREE_CHAIN (decl))
1844 {
1845 SET_DECL_RTL (decl, 0);
1846 DECL_INCOMING_RTL (decl) = 0;
1847 }
1848
1849 /* Similarly, discard DECL_RTL of the return value. */
1850 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1851
1852 /* But DECL_INITIAL must remain nonzero so we know this
1853 was an actual function definition unless toplev.c decided not
1854 to inline it. */
1855 if (DECL_INITIAL (current_function_decl) != 0)
1856 DECL_INITIAL (current_function_decl) = error_mark_node;
1857
1858 DECL_ARGUMENTS (current_function_decl) = 0;
1859 }
1860
1861 /* If we are not at the bottom of the function nesting stack, pop up to
1862 the containing function. Otherwise show we aren't in any function. */
1863 if (--function_nesting_depth != 0)
1864 pop_function_context ();
1865 else
1866 current_function_decl = 0;
1867 }
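
/* Illustrative sketch only, kept out of the build: the usual sequence for
   compiling a parameterless procedure with the routines above.  The name
   "proc" and the flag values are placeholders.  */
#if 0
{
  tree gnu_type = build_function_type (void_type_node, NULL_TREE);
  tree gnu_decl
    = create_subprog_decl (get_identifier ("proc"), NULL_TREE, gnu_type,
                           NULL_TREE, 0, 1, 0, NULL);

  begin_subprog_body (gnu_decl);
  /* ... expand the declarations and statements of the body ...  */
  end_subprog_body ();
}
#endif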
1868 \f
1869 /* Return a definition for a builtin function named NAME and whose data type
1870 is TYPE. TYPE should be a function type with argument types.
1871 FUNCTION_CODE tells later passes how to compile calls to this function.
1872 See tree.h for its possible values.
1873
1874 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1875 the name to be called if we can't opencode the function. */
1876
1877 tree
1878 builtin_function (name, type, function_code, class, library_name)
1879 const char *name;
1880 tree type;
1881 int function_code;
1882 enum built_in_class class;
1883 const char *library_name;
1884 {
1885 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1886
1887 DECL_EXTERNAL (decl) = 1;
1888 TREE_PUBLIC (decl) = 1;
1889 if (library_name)
1890 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1891
1892 pushdecl (decl);
1893 DECL_BUILT_IN_CLASS (decl) = class;
1894 DECL_FUNCTION_CODE (decl) = function_code;
1895 return decl;
1896 }
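
/* Illustrative sketch only, kept out of the build: registering a builtin
   taking no arguments and returning an integer.  The names and the zero
   function code are placeholders.  */
#if 0
{
  tree ftype
    = build_function_type (integer_type_node,
                           tree_cons (NULL_TREE, void_type_node, NULL_TREE));

  builtin_function ("__builtin_example", ftype, 0, BUILT_IN_NORMAL,
                    "example");
}
#endif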
1897
1898 /* Return an integer type with the number of bits of precision given by
1899 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1900 it is a signed type. */
1901
1902 tree
1903 gnat_type_for_size (precision, unsignedp)
1904 unsigned precision;
1905 int unsignedp;
1906 {
1907 tree t;
1908 char type_name[20];
1909
1910 if (precision <= 2 * MAX_BITS_PER_WORD
1911 && signed_and_unsigned_types[precision][unsignedp] != 0)
1912 return signed_and_unsigned_types[precision][unsignedp];
1913
1914 if (unsignedp)
1915 t = make_unsigned_type (precision);
1916 else
1917 t = make_signed_type (precision);
1918
1919 if (precision <= 2 * MAX_BITS_PER_WORD)
1920 signed_and_unsigned_types[precision][unsignedp] = t;
1921
1922 if (TYPE_NAME (t) == 0)
1923 {
1924 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1925 TYPE_NAME (t) = get_identifier (type_name);
1926 }
1927
1928 return t;
1929 }
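
/* Illustrative sketch only, kept out of the build: repeated requests for
   a given precision return the same cached tree, and the mode-based
   entry point below funnels integer modes through this routine.  */
#if 0
{
  tree gnu_u32_a = gnat_type_for_size (32, 1);
  tree gnu_u32_b = gnat_type_for_size (32, 1);
  tree gnu_s32 = gnat_type_for_mode (SImode, 0);

  /* GNU_U32_A == GNU_U32_B thanks to signed_and_unsigned_types;
     GNU_S32 is the cached 32-bit signed type since SImode is 32 bits.  */
}
#endif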
1930
1931 /* Likewise for floating-point types. */
1932
1933 static tree
1934 float_type_for_size (precision, mode)
1935 int precision;
1936 enum machine_mode mode;
1937 {
1938 tree t;
1939 char type_name[20];
1940
1941 if (float_types[(int) mode] != 0)
1942 return float_types[(int) mode];
1943
1944 float_types[(int) mode] = t = make_node (REAL_TYPE);
1945 TYPE_PRECISION (t) = precision;
1946 layout_type (t);
1947
1948 if (TYPE_MODE (t) != mode)
1949 gigi_abort (414);
1950
1951 if (TYPE_NAME (t) == 0)
1952 {
1953 sprintf (type_name, "FLOAT_%d", precision);
1954 TYPE_NAME (t) = get_identifier (type_name);
1955 }
1956
1957 return t;
1958 }
1959
1960 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1961 an unsigned type; otherwise a signed type is returned. */
1962
1963 tree
1964 gnat_type_for_mode (mode, unsignedp)
1965 enum machine_mode mode;
1966 int unsignedp;
1967 {
1968 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1969 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
1970 else
1971 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1972 }
1973
1974 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1975
1976 tree
1977 gnat_unsigned_type (type_node)
1978 tree type_node;
1979 {
1980 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1981
1982 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1983 {
1984 type = copy_node (type);
1985 TREE_TYPE (type) = type_node;
1986 }
1987 else if (TREE_TYPE (type_node) != 0
1988 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1989 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1990 {
1991 type = copy_node (type);
1992 TREE_TYPE (type) = TREE_TYPE (type_node);
1993 }
1994
1995 return type;
1996 }
1997
1998 /* Return the signed version of a TYPE_NODE, a scalar type. */
1999
2000 tree
2001 gnat_signed_type (type_node)
2002 tree type_node;
2003 {
2004 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2005
2006 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2007 {
2008 type = copy_node (type);
2009 TREE_TYPE (type) = type_node;
2010 }
2011 else if (TREE_TYPE (type_node) != 0
2012 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2013 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2014 {
2015 type = copy_node (type);
2016 TREE_TYPE (type) = TREE_TYPE (type_node);
2017 }
2018
2019 return type;
2020 }
2021
2022 /* Return a type the same as TYPE except unsigned or signed according to
2023 UNSIGNEDP. */
2024
2025 tree
2026 gnat_signed_or_unsigned_type (unsignedp, type)
2027 int unsignedp;
2028 tree type;
2029 {
2030 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2031 return type;
2032 else
2033 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2034 }
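
/* Illustrative sketch only, kept out of the build: flipping the signedness
   of an integer type while keeping its precision.  */
#if 0
{
  tree gnu_s32 = gnat_type_for_size (32, 0);
  tree gnu_u32 = gnat_unsigned_type (gnu_s32);
  tree gnu_back = gnat_signed_or_unsigned_type (0, gnu_u32);

  /* GNU_U32 has the same 32-bit precision as GNU_S32 but is unsigned;
     GNU_BACK is again the signed 32-bit type.  */
}
#endif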
2035 \f
2036 /* EXP is an expression for the size of an object. If this size contains
2037 discriminant references, replace them with the maximum (if MAX_P) or
2038 minimum (if ! MAX_P) possible value of the discriminant. */
2039
2040 tree
2041 max_size (exp, max_p)
2042 tree exp;
2043 int max_p;
2044 {
2045 enum tree_code code = TREE_CODE (exp);
2046 tree type = TREE_TYPE (exp);
2047
2048 switch (TREE_CODE_CLASS (code))
2049 {
2050 case 'd':
2051 case 'c':
2052 return exp;
2053
2054 case 'x':
2055 if (code == TREE_LIST)
2056 return tree_cons (TREE_PURPOSE (exp),
2057 max_size (TREE_VALUE (exp), max_p),
2058 TREE_CHAIN (exp) != 0
2059 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2060 break;
2061
2062 case 'r':
2063 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2064 modify. Otherwise, we abort since it is something we can't
2065 handle. */
2066 if (! contains_placeholder_p (exp))
2067 gigi_abort (406);
2068
2069 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2070 return
2071 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2072
2073 case '<':
2074 return max_p ? size_one_node : size_zero_node;
2075
2076 case '1':
2077 case '2':
2078 case 'e':
2079 switch (TREE_CODE_LENGTH (code))
2080 {
2081 case 1:
2082 if (code == NON_LVALUE_EXPR)
2083 return max_size (TREE_OPERAND (exp, 0), max_p);
2084 else
2085 return
2086 fold (build1 (code, type,
2087 max_size (TREE_OPERAND (exp, 0),
2088 code == NEGATE_EXPR ? ! max_p : max_p)));
2089
2090 case 2:
2091 if (code == RTL_EXPR)
2092 gigi_abort (407);
2093 else if (code == COMPOUND_EXPR)
2094 return max_size (TREE_OPERAND (exp, 1), max_p);
2095 else if (code == WITH_RECORD_EXPR)
2096 return exp;
2097
2098 {
2099 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2100 tree rhs = max_size (TREE_OPERAND (exp, 1),
2101 code == MINUS_EXPR ? ! max_p : max_p);
2102
2103 /* Special-case wanting the maximum value of a MIN_EXPR.
2104 In that case, if one side overflows, return the other.
2105 sizetype is signed, but we know sizes are non-negative.
2106 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2107 overflowing or the maximum possible value and the RHS
2108 a variable. */
2109 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2110 return lhs;
2111 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2112 return rhs;
2113 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2114 && (TREE_OVERFLOW (lhs)
2115 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2116 && ! TREE_CONSTANT (rhs))
2117 return lhs;
2118 else
2119 return fold (build (code, type, lhs, rhs));
2120 }
2121
2122 case 3:
2123 if (code == SAVE_EXPR)
2124 return exp;
2125 else if (code == COND_EXPR)
2126 return fold (build (MAX_EXPR, type,
2127 max_size (TREE_OPERAND (exp, 1), max_p),
2128 max_size (TREE_OPERAND (exp, 2), max_p)));
2129 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2130 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2131 max_size (TREE_OPERAND (exp, 1), max_p));
2132 }
2133 }
2134
2135 gigi_abort (408);
2136 }
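
/* Illustrative sketch only, kept out of the build: a conditional size is
   folded to the maximum of its two arms; the condition itself is ignored.
   The constants used here are arbitrary.  */
#if 0
{
  tree gnu_cond = build (GT_EXPR, integer_type_node,
                         size_int (1), size_int (0));
  tree gnu_size = build (COND_EXPR, sizetype, gnu_cond,
                         size_int (32), size_int (64));
  tree gnu_max = max_size (gnu_size, 1);  /* Folds to the constant 64.  */
}
#endif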
2137 \f
2138 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2139 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2140 Return a constructor for the template. */
2141
2142 tree
2143 build_template (template_type, array_type, expr)
2144 tree template_type;
2145 tree array_type;
2146 tree expr;
2147 {
2148 tree template_elts = NULL_TREE;
2149 tree bound_list = NULL_TREE;
2150 tree field;
2151
2152 if (TREE_CODE (array_type) == RECORD_TYPE
2153 && (TYPE_IS_PADDING_P (array_type)
2154 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2155 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2156
2157 if (TREE_CODE (array_type) == ARRAY_TYPE
2158 || (TREE_CODE (array_type) == INTEGER_TYPE
2159 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2160 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2161
2162 /* First make the list for a CONSTRUCTOR for the template. Go down the
2163 field list of the template instead of the type chain because this
2164 array might be an Ada array of arrays and we can't tell where the
2165 nested arrays stop being the underlying object. */
2166
2167 for (field = TYPE_FIELDS (template_type); field;
2168 (bound_list != 0
2169 ? (bound_list = TREE_CHAIN (bound_list))
2170 : (array_type = TREE_TYPE (array_type))),
2171 field = TREE_CHAIN (TREE_CHAIN (field)))
2172 {
2173 tree bounds, min, max;
2174
2175 /* If we have a bound list, get the bounds from there. Likewise
2176 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2177 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2178 This will give us a maximum range. */
2179 if (bound_list != 0)
2180 bounds = TREE_VALUE (bound_list);
2181 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2182 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2183 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2184 && DECL_BY_COMPONENT_PTR_P (expr))
2185 bounds = TREE_TYPE (field);
2186 else
2187 gigi_abort (411);
2188
2189 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2190 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2191
2192 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2193 surround them with a WITH_RECORD_EXPR giving EXPR as the
2194 OBJECT. */
2195 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2196 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2197 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2198 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
2199
2200 template_elts = tree_cons (TREE_CHAIN (field), max,
2201 tree_cons (field, min, template_elts));
2202 }
2203
2204 return build_constructor (template_type, nreverse (template_elts));
2205 }
2206 \f
2207 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2208 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2209 in the type contains in its DECL_INITIAL the expression to use when
2210 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2211 to print out an error message if the mechanism cannot be applied to
2212 an object of that type; it is also used for the name. */
2213
2214 tree
2215 build_vms_descriptor (type, mech, gnat_entity)
2216 tree type;
2217 Mechanism_Type mech;
2218 Entity_Id gnat_entity;
2219 {
2220 tree record_type = make_node (RECORD_TYPE);
2221 tree field_list = 0;
2222 int class;
2223 int dtype = 0;
2224 tree inner_type;
2225 int ndim;
2226 int i;
2227 tree *idx_arr;
2228 tree tem;
2229
2230 /* If TYPE is an unconstrained array, use the underlying array type. */
2231 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2232 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2233
2234 /* If this is an array, compute the number of dimensions in the array,
2235 get the index types, and point to the inner type. */
2236 if (TREE_CODE (type) != ARRAY_TYPE)
2237 ndim = 0;
2238 else
2239 for (ndim = 1, inner_type = type;
2240 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2241 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2242 ndim++, inner_type = TREE_TYPE (inner_type))
2243 ;
2244
2245 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2246
2247 if (mech != By_Descriptor_NCA
2248 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2249 for (i = ndim - 1, inner_type = type;
2250 i >= 0;
2251 i--, inner_type = TREE_TYPE (inner_type))
2252 idx_arr[i] = TYPE_DOMAIN (inner_type);
2253 else
2254 for (i = 0, inner_type = type;
2255 i < ndim;
2256 i++, inner_type = TREE_TYPE (inner_type))
2257 idx_arr[i] = TYPE_DOMAIN (inner_type);
2258
2259 /* Now get the DTYPE value. */
2260 switch (TREE_CODE (type))
2261 {
2262 case INTEGER_TYPE:
2263 case ENUMERAL_TYPE:
2264 if (TYPE_VAX_FLOATING_POINT_P (type))
2265 switch ((int) TYPE_DIGITS_VALUE (type))
2266 {
2267 case 6:
2268 dtype = 10;
2269 break;
2270 case 9:
2271 dtype = 11;
2272 break;
2273 case 15:
2274 dtype = 27;
2275 break;
2276 }
2277 else
2278 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2279 {
2280 case 8:
2281 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2282 break;
2283 case 16:
2284 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2285 break;
2286 case 32:
2287 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2288 break;
2289 case 64:
2290 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2291 break;
2292 case 128:
2293 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2294 break;
2295 }
2296 break;
2297
2298 case REAL_TYPE:
2299 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2300 break;
2301
2302 case COMPLEX_TYPE:
2303 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2304 && TYPE_VAX_FLOATING_POINT_P (type))
2305 switch ((int) TYPE_DIGITS_VALUE (type))
2306 {
2307 case 6:
2308 dtype = 12;
2309 break;
2310 case 9:
2311 dtype = 13;
2312 break;
2313 case 15:
2314 dtype = 29;
2315 }
2316 else
2317 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2318 break;
2319
2320 case ARRAY_TYPE:
2321 dtype = 14;
2322 break;
2323
2324 default:
2325 break;
2326 }
2327
2328 /* Get the CLASS value. */
2329 switch (mech)
2330 {
2331 case By_Descriptor_A:
2332 class = 4;
2333 break;
2334 case By_Descriptor_NCA:
2335 class = 10;
2336 break;
2337 case By_Descriptor_SB:
2338 class = 15;
2339 break;
2340 default:
2341 class = 1;
2342 }
2343
2344 /* Make the type for a descriptor for VMS. The first four fields
2345 are the same for all types. */
2346
2347 field_list
2348 = chainon (field_list,
2349 make_descriptor_field
2350 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2351 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2352
2353 field_list = chainon (field_list,
2354 make_descriptor_field ("DTYPE",
2355 gnat_type_for_size (8, 1),
2356 record_type, size_int (dtype)));
2357 field_list = chainon (field_list,
2358 make_descriptor_field ("CLASS",
2359 gnat_type_for_size (8, 1),
2360 record_type, size_int (class)));
2361
2362 field_list
2363 = chainon (field_list,
2364 make_descriptor_field ("POINTER",
2365 build_pointer_type (type),
2366 record_type,
2367 build1 (ADDR_EXPR,
2368 build_pointer_type (type),
2369 build (PLACEHOLDER_EXPR,
2370 type))));
2371
2372 switch (mech)
2373 {
2374 case By_Descriptor:
2375 case By_Descriptor_S:
2376 break;
2377
2378 case By_Descriptor_SB:
2379 field_list
2380 = chainon (field_list,
2381 make_descriptor_field
2382 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2383 TREE_CODE (type) == ARRAY_TYPE
2384 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2385 field_list
2386 = chainon (field_list,
2387 make_descriptor_field
2388 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2389 TREE_CODE (type) == ARRAY_TYPE
2390 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2391 break;
2392
2393 case By_Descriptor_A:
2394 case By_Descriptor_NCA:
2395 field_list = chainon (field_list,
2396 make_descriptor_field ("SCALE",
2397 gnat_type_for_size (8, 1),
2398 record_type,
2399 size_zero_node));
2400
2401 field_list = chainon (field_list,
2402 make_descriptor_field ("DIGITS",
2403 gnat_type_for_size (8, 1),
2404 record_type,
2405 size_zero_node));
2406
2407 field_list
2408 = chainon (field_list,
2409 make_descriptor_field
2410 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2411 size_int (mech == By_Descriptor_NCA
2412 ? 0
2413 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2414 : (TREE_CODE (type) == ARRAY_TYPE
2415 && TYPE_CONVENTION_FORTRAN_P (type)
2416 ? 224 : 192))));
2417
2418 field_list = chainon (field_list,
2419 make_descriptor_field ("DIMCT",
2420 gnat_type_for_size (8, 1),
2421 record_type,
2422 size_int (ndim)));
2423
2424 field_list = chainon (field_list,
2425 make_descriptor_field ("ARSIZE",
2426 gnat_type_for_size (32, 1),
2427 record_type,
2428 size_in_bytes (type)));
2429
2430 /* Now build a pointer to the 0,0,0... element. */
2431 tem = build (PLACEHOLDER_EXPR, type);
2432 for (i = 0, inner_type = type; i < ndim;
2433 i++, inner_type = TREE_TYPE (inner_type))
2434 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2435 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2436
2437 field_list
2438 = chainon (field_list,
2439 make_descriptor_field
2440 ("A0", build_pointer_type (inner_type), record_type,
2441 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2442
2443 /* Next come the addressing coefficients. */
2444 tem = size_int (1);
2445 for (i = 0; i < ndim; i++)
2446 {
2447 char fname[3];
2448 tree idx_length
2449 = size_binop (MULT_EXPR, tem,
2450 size_binop (PLUS_EXPR,
2451 size_binop (MINUS_EXPR,
2452 TYPE_MAX_VALUE (idx_arr[i]),
2453 TYPE_MIN_VALUE (idx_arr[i])),
2454 size_int (1)));
2455
2456 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2457 fname[1] = '0' + i, fname[2] = 0;
2458 field_list
2459 = chainon (field_list,
2460 make_descriptor_field (fname,
2461 gnat_type_for_size (32, 1),
2462 record_type, idx_length));
2463
2464 if (mech == By_Descriptor_NCA)
2465 tem = idx_length;
2466 }
2467
2468 /* Finally here are the bounds. */
2469 for (i = 0; i < ndim; i++)
2470 {
2471 char fname[3];
2472
2473 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2474 field_list
2475 = chainon (field_list,
2476 make_descriptor_field
2477 (fname, gnat_type_for_size (32, 1), record_type,
2478 TYPE_MIN_VALUE (idx_arr[i])));
2479
2480 fname[0] = 'U';
2481 field_list
2482 = chainon (field_list,
2483 make_descriptor_field
2484 (fname, gnat_type_for_size (32, 1), record_type,
2485 TYPE_MAX_VALUE (idx_arr[i])));
2486 }
2487 break;
2488
2489 default:
2490 post_error ("unsupported descriptor type for &", gnat_entity);
2491 }
2492
2493 finish_record_type (record_type, field_list, 0, 1);
2494 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2495 record_type));
2496
2497 return record_type;
2498 }
2499
2500 /* Utility routine for above code to make a field. */
2501
2502 static tree
2503 make_descriptor_field (name, type, rec_type, initial)
2504 const char *name;
2505 tree type;
2506 tree rec_type;
2507 tree initial;
2508 {
2509 tree field
2510 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2511
2512 DECL_INITIAL (field) = initial;
2513 return field;
2514 }
2515 \f
2516 /* Build a type to be used to represent an aliased object whose nominal
2517 type is an unconstrained array. This consists of a RECORD_TYPE containing
2518 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2519 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2520 is used to represent an arbitrary unconstrained object. Use NAME
2521 as the name of the record. */
2522
2523 tree
2524 build_unc_object_type (template_type, object_type, name)
2525 tree template_type;
2526 tree object_type;
2527 tree name;
2528 {
2529 tree type = make_node (RECORD_TYPE);
2530 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2531 template_type, type, 0, 0, 0, 1);
2532 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2533 type, 0, 0, 0, 1);
2534
2535 TYPE_NAME (type) = name;
2536 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2537 finish_record_type (type,
2538 chainon (chainon (NULL_TREE, template_field),
2539 array_field),
2540 0, 0);
2541
2542 return type;
2543 }
2544 \f
2545 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2546 the normal case this is just two adjustments, but we have more to do
2547 if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
2548
2549 void
2550 update_pointer_to (old_type, new_type)
2551 tree old_type;
2552 tree new_type;
2553 {
2554 tree ptr = TYPE_POINTER_TO (old_type);
2555 tree ref = TYPE_REFERENCE_TO (old_type);
2556 tree type;
2557
2558 /* If this is the main variant, process all the other variants first. */
2559 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2560 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2561 type = TYPE_NEXT_VARIANT (type))
2562 update_pointer_to (type, new_type);
2563
2564 /* If no pointer or reference, we are done. Otherwise, get the new type with
2565 the same qualifiers as the old type and see if it is the same as the old
2566 type. */
2567 if (ptr == 0 && ref == 0)
2568 return;
2569
2570 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2571 if (old_type == new_type)
2572 return;
2573
2574 /* First handle the simple case. */
2575 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2576 {
2577 if (ptr != 0)
2578 TREE_TYPE (ptr) = new_type;
2579 TYPE_POINTER_TO (new_type) = ptr;
2580
2581 if (ref != 0)
2582 TREE_TYPE (ref) = new_type;
2583 TYPE_REFERENCE_TO (new_type) = ref;
2584
2585 if (ptr != 0 && TYPE_NAME (ptr) != 0
2586 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2587 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2588 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2589 global_bindings_p (), 0);
2590 if (ref != 0 && TYPE_NAME (ref) != 0
2591 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2592 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2593 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2594 global_bindings_p (), 0);
2595 }
2596
2597 /* Now deal with the unconstrained array case. In this case the "pointer"
2598 is actually a RECORD_TYPE where the types of both fields are
2599 pointers to void. Copy the field list from the
2600 old type to the new one and update the fields' context. */
2601 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2602 gigi_abort (412);
2603
2604 else
2605 {
2606 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2607 tree ptr_temp_type;
2608 tree new_ref;
2609 tree var;
2610
2611 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2612 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2613 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2614
2615 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2616 template bounds.
2617
2618 ??? This is now the only use of gnat_substitute_in_type, which
2619 is now a very "heavy" routine to do this, so it should be replaced
2620 at some point. */
2621 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2622 new_ref = build (COMPONENT_REF, ptr_temp_type,
2623 build (PLACEHOLDER_EXPR, ptr),
2624 TREE_CHAIN (TYPE_FIELDS (ptr)));
2625
2626 update_pointer_to
2627 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2628 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2629 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2630
2631 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2632 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2633
2634 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2635 = TREE_TYPE (new_type) = ptr;
2636
2637 /* Now handle updating the allocation record, what the thin pointer
2638 points to. Update all pointers from the old record into the new
2639 one, update the types of the fields, and recompute the size. */
2640
2641 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2642
2643 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2644 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2645 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2646 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2647 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2648 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2649 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2650
2651 TYPE_SIZE (new_obj_rec)
2652 = size_binop (PLUS_EXPR,
2653 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2654 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2655 TYPE_SIZE_UNIT (new_obj_rec)
2656 = size_binop (PLUS_EXPR,
2657 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2658 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2659 rest_of_type_compilation (ptr, global_bindings_p ());
2660 }
2661 }
2662 \f
2663 /* Convert a pointer to a constrained array into a pointer to a fat
2664 pointer. This involves making or finding a template. */
2665
2666 static tree
2667 convert_to_fat_pointer (type, expr)
2668 tree type;
2669 tree expr;
2670 {
2671 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2672 tree template, template_addr;
2673 tree etype = TREE_TYPE (expr);
2674
2675 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2676 pointer to the template and array. */
2677 if (integer_zerop (expr))
2678 return
2679 build_constructor
2680 (type,
2681 tree_cons (TYPE_FIELDS (type),
2682 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2683 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2684 convert (build_pointer_type (template_type),
2685 expr),
2686 NULL_TREE)));
2687
2688 /* If EXPR is a thin pointer, make the template and data from the record. */
2689
2690 else if (TYPE_THIN_POINTER_P (etype))
2691 {
2692 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2693
2694 expr = save_expr (expr);
2695 if (TREE_CODE (expr) == ADDR_EXPR)
2696 expr = TREE_OPERAND (expr, 0);
2697 else
2698 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2699
2700 template = build_component_ref (expr, NULL_TREE, fields);
2701 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2702 build_component_ref (expr, NULL_TREE,
2703 TREE_CHAIN (fields)));
2704 }
2705 else
2706 /* Otherwise, build the constructor for the template. */
2707 template = build_template (template_type, TREE_TYPE (etype), expr);
2708
2709 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2710
2711 /* The result is a CONSTRUCTOR for the fat pointer. */
2712 return
2713 build_constructor (type,
2714 tree_cons (TYPE_FIELDS (type), expr,
2715 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2716 template_addr, NULL_TREE)));
2717 }
2718 \f
2719 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2720 is something that is a fat pointer, so convert EXPR to a fat pointer
2721 first if it is not one already. */
2722
2723 static tree
2724 convert_to_thin_pointer (type, expr)
2725 tree type;
2726 tree expr;
2727 {
2728 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2729 expr
2730 = convert_to_fat_pointer
2731 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2732
2733 /* We get the pointer to the data and use a NOP_EXPR to make it the
2734 proper GCC type. */
2735 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2736 expr = build1 (NOP_EXPR, type, expr);
2737
2738 return expr;
2739 }
2740 \f
2741 /* Create an expression whose value is that of EXPR,
2742 converted to type TYPE. The TREE_TYPE of the value
2743 is always TYPE. This function implements all reasonable
2744 conversions; callers should filter out those that are
2745 not permitted by the language being compiled. */
2746
2747 tree
2748 convert (type, expr)
2749 tree type, expr;
2750 {
2751 enum tree_code code = TREE_CODE (type);
2752 tree etype = TREE_TYPE (expr);
2753 enum tree_code ecode = TREE_CODE (etype);
2754 tree tem;
2755
2756 /* If EXPR is already the right type, we are done. */
2757 if (type == etype)
2758 return expr;
2759
2760 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2761 new one. */
2762 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2763 return build (WITH_RECORD_EXPR, type,
2764 convert (type, TREE_OPERAND (expr, 0)),
2765 TREE_OPERAND (expr, 1));
2766
2767 /* If the input type has padding, remove it by doing a component reference
2768 to the field. If the output type has padding, make a constructor
2769 to build the record. If both input and output have padding and are
2770 of variable size, do this as an unchecked conversion. */
2771 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2772 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2773 && (! TREE_CONSTANT (TYPE_SIZE (type))
2774 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2775 ;
2776 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2777 {
2778 /* If we have just converted to this padded type, just get
2779 the inner expression. */
2780 if (TREE_CODE (expr) == CONSTRUCTOR
2781 && CONSTRUCTOR_ELTS (expr) != 0
2782 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2783 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2784 else
2785 return convert (type, build_component_ref (expr, NULL_TREE,
2786 TYPE_FIELDS (etype)));
2787 }
2788 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2789 {
2790 /* If we previously converted from another type and our type is
2791 of variable size, remove the conversion to avoid the need for
2792 variable-size temporaries. */
2793 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2794 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2795 expr = TREE_OPERAND (expr, 0);
2796
2797 /* If we are just removing the padding from expr, convert the original
2798 object if we have variable size. That will avoid the need
2799 for some variable-size temporaries. */
2800 if (TREE_CODE (expr) == COMPONENT_REF
2801 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2802 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2803 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2804 return convert (type, TREE_OPERAND (expr, 0));
2805
2806 /* If the result type is a padded type with a self-referentially-sized
2807 field and the expression type is a record, do this as an
2808 unchecked conversion. */
2809 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2810 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2811 && TREE_CODE (etype) == RECORD_TYPE)
2812 return unchecked_convert (type, expr);
2813
2814 else
2815 return
2816 build_constructor (type,
2817 tree_cons (TYPE_FIELDS (type),
2818 convert (TREE_TYPE
2819 (TYPE_FIELDS (type)),
2820 expr),
2821 NULL_TREE));
2822 }
2823
2824 /* If the input is a biased type, adjust first. */
2825 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2826 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2827 fold (build1 (GNAT_NOP_EXPR,
2828 TREE_TYPE (etype), expr)),
2829 TYPE_MIN_VALUE (etype))));
2830
2831 /* If the input is a left-justified modular type, we need to extract
2832 the actual object before converting it to any other type with the
2833 exception of an unconstrained array. */
2834 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2835 && code != UNCONSTRAINED_ARRAY_TYPE)
2836 return convert (type, build_component_ref (expr, NULL_TREE,
2837 TYPE_FIELDS (etype)));
2838
2839 /* If converting a type that does not contain a template into one
2840 that does, convert to the data type and then build the template. */
2841 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2842 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2843 {
2844 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2845
2846 return
2847 build_constructor
2848 (type,
2849 tree_cons (TYPE_FIELDS (type),
2850 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2851 obj_type, NULL_TREE),
2852 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2853 convert (obj_type, expr), NULL_TREE)));
2854 }
2855
2856 /* There are some special cases of expressions that we process
2857 specially. */
2858 switch (TREE_CODE (expr))
2859 {
2860 case ERROR_MARK:
2861 return expr;
2862
2863 case TRANSFORM_EXPR:
2864 case NULL_EXPR:
2865 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2866 conversion in gnat_expand_expr. NULL_EXPR does not represent
2867 an actual value, so no conversion is needed. */
2868 TREE_TYPE (expr) = type;
2869 return expr;
2870
2871 case STRING_CST:
2872 case CONSTRUCTOR:
2873 /* If we are converting a STRING_CST to another constrained array type,
2874 just make a new one in the proper type. Likewise for a
2875 CONSTRUCTOR. But if the mode of the type is different, we must
2876 ensure a new RTL is made for the constant. */
2877 if (code == ecode && AGGREGATE_TYPE_P (etype)
2878 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2879 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2880 {
2881 expr = copy_node (expr);
2882 TREE_TYPE (expr) = type;
2883
2884 if (TYPE_MODE (type) != TYPE_MODE (etype))
2885 TREE_CST_RTL (expr) = 0;
2886
2887 return expr;
2888 }
2889 break;
2890
2891 case COMPONENT_REF:
2892 /* If we are converting between two aggregate types of the same
2893 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2894 This avoids unneeded conversions, which make reference computations
2895 more complex. */
2896 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2897 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2898 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2899 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2900 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2901 TREE_OPERAND (expr, 1));
2902
2903 break;
2904
2905 case UNCONSTRAINED_ARRAY_REF:
2906 /* Convert this to the type of the inner array by getting the address of
2907 the array from the template. */
2908 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2909 build_component_ref (TREE_OPERAND (expr, 0),
2910 get_identifier ("P_ARRAY"),
2911 NULL_TREE));
2912 etype = TREE_TYPE (expr);
2913 ecode = TREE_CODE (etype);
2914 break;
2915
2916 case VIEW_CONVERT_EXPR:
2917 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2918 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2919 return convert (type, TREE_OPERAND (expr, 0));
2920 break;
2921
2922 case INDIRECT_REF:
2923 /* If both types are record types, just convert the pointer and
2924 make a new INDIRECT_REF.
2925
2926 ??? Disable this for now since it causes problems with the
2927 code in build_binary_op for MODIFY_EXPR which wants to
2928 strip off conversions. But that code really is a mess and
2929 we need to do this a much better way some time. */
2930 if (0
2931 && (TREE_CODE (type) == RECORD_TYPE
2932 || TREE_CODE (type) == UNION_TYPE)
2933 && (TREE_CODE (etype) == RECORD_TYPE
2934 || TREE_CODE (etype) == UNION_TYPE)
2935 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2936 return build_unary_op (INDIRECT_REF, NULL_TREE,
2937 convert (build_pointer_type (type),
2938 TREE_OPERAND (expr, 0)));
2939 break;
2940
2941 default:
2942 break;
2943 }
2944
2945 /* Check for converting to a pointer to an unconstrained array. */
2946 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2947 return convert_to_fat_pointer (type, expr);
2948
2949 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2950 || (code == INTEGER_CST && ecode == INTEGER_CST
2951 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2952 return fold (build1 (NOP_EXPR, type, expr));
2953
2954 switch (code)
2955 {
2956 case VOID_TYPE:
2957 return build1 (CONVERT_EXPR, type, expr);
2958
2959 case INTEGER_TYPE:
2960 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2961 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2962 return unchecked_convert (type, expr);
2963 else if (TYPE_BIASED_REPRESENTATION_P (type))
2964 return fold (build1 (CONVERT_EXPR, type,
2965 fold (build (MINUS_EXPR, TREE_TYPE (type),
2966 convert (TREE_TYPE (type), expr),
2967 TYPE_MIN_VALUE (type)))));
2968
2969 /* ... fall through ... */
2970
2971 case ENUMERAL_TYPE:
2972 return fold (convert_to_integer (type, expr));
2973
2974 case POINTER_TYPE:
2975 case REFERENCE_TYPE:
2976 /* If converting between two pointers to records denoting
2977 both a template and type, adjust if needed to account
2978 for any differing offsets, since one might be negative. */
2979 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
2980 {
2981 tree bit_diff
2982 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
2983 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
2984 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
2985 sbitsize_int (BITS_PER_UNIT));
2986
2987 expr = build1 (NOP_EXPR, type, expr);
2988 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
2989 if (integer_zerop (byte_diff))
2990 return expr;
2991
2992 return build_binary_op (PLUS_EXPR, type, expr,
2993 fold (convert_to_pointer (type, byte_diff)));
2994 }
2995
2996 /* If converting to a thin pointer, handle specially. */
2997 if (TYPE_THIN_POINTER_P (type)
2998 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
2999 return convert_to_thin_pointer (type, expr);
3000
3001 /* If converting fat pointer to normal pointer, get the pointer to the
3002 array and then convert it. */
3003 else if (TYPE_FAT_POINTER_P (etype))
3004 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3005 NULL_TREE);
3006
3007 return fold (convert_to_pointer (type, expr));
3008
3009 case REAL_TYPE:
3010 return fold (convert_to_real (type, expr));
3011
3012 case RECORD_TYPE:
3013 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3014 return
3015 build_constructor
3016 (type, tree_cons (TYPE_FIELDS (type),
3017 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3018 NULL_TREE));
3019
3020 /* ... fall through ... */
3021
3022 case ARRAY_TYPE:
3023 /* In these cases, assume the front-end has validated the conversion.
3024 If the conversion is valid, it will be a bit-wise conversion, so
3025 it can be viewed as an unchecked conversion. */
3026 return unchecked_convert (type, expr);
3027
3028 case UNION_TYPE:
3029 /* Just validate that the type is indeed that of a field
3030 of the type. Then make the simple conversion. */
3031 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3032 if (TREE_TYPE (tem) == etype)
3033 return build1 (CONVERT_EXPR, type, expr);
3034
3035 gigi_abort (413);
3036
3037 case UNCONSTRAINED_ARRAY_TYPE:
3038 /* If EXPR is a constrained array, take its address, convert it to a
3039 fat pointer, and then dereference it. Likewise if EXPR is a
3040 record containing both a template and a constrained array.
3041 Note that a record representing a left justified modular type
3042 always represents a packed constrained array. */
3043 if (ecode == ARRAY_TYPE
3044 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3045 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3046 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3047 return
3048 build_unary_op
3049 (INDIRECT_REF, NULL_TREE,
3050 convert_to_fat_pointer (TREE_TYPE (type),
3051 build_unary_op (ADDR_EXPR,
3052 NULL_TREE, expr)));
3053
3054 /* Do something very similar for converting one unconstrained
3055 array to another. */
3056 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3057 return
3058 build_unary_op (INDIRECT_REF, NULL_TREE,
3059 convert (TREE_TYPE (type),
3060 build_unary_op (ADDR_EXPR,
3061 NULL_TREE, expr)));
3062 else
3063 gigi_abort (409);
3064
3065 case COMPLEX_TYPE:
3066 return fold (convert_to_complex (type, expr));
3067
3068 default:
3069 gigi_abort (410);
3070 }
3071 }
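
/* Illustrative sketch only, kept out of the build: the common scalar cases
   above end up in the generic convert_to_* routines once the Ada-specific
   wrappers (padding, left-justified modular, fat pointers) are peeled off.  */
#if 0
{
  tree gnu_u8 = gnat_type_for_size (8, 1);
  tree gnu_small = convert (gnu_u8, integer_zero_node);
  tree gnu_wide = convert (gnat_type_for_size (32, 0), gnu_small);

  /* GNU_SMALL is zero of the 8-bit type; GNU_WIDE widens it back to
     the 32-bit signed type via convert_to_integer.  */
}
#endif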
3072 \f
3073 /* Remove all conversions that are done in EXP. This includes converting
3074 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3075 is nonzero, always return the address of the containing object even if
3076 the address is not bit-aligned. */
3077
3078 tree
3079 remove_conversions (exp, true_address)
3080 tree exp;
3081 int true_address;
3082 {
3083 switch (TREE_CODE (exp))
3084 {
3085 case CONSTRUCTOR:
3086 if (true_address
3087 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3088 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3089 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3090 break;
3091
3092 case COMPONENT_REF:
3093 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3094 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3095 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3096 break;
3097
3098 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3099 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3100 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3101
3102 default:
3103 break;
3104 }
3105
3106 return exp;
3107 }
3108 \f
3109 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3110 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3111 likewise return an expression pointing to the underlying array. */
3112
3113 tree
3114 maybe_unconstrained_array (exp)
3115 tree exp;
3116 {
3117 enum tree_code code = TREE_CODE (exp);
3118 tree new;
3119
3120 switch (TREE_CODE (TREE_TYPE (exp)))
3121 {
3122 case UNCONSTRAINED_ARRAY_TYPE:
3123 if (code == UNCONSTRAINED_ARRAY_REF)
3124 {
3125 new
3126 = build_unary_op (INDIRECT_REF, NULL_TREE,
3127 build_component_ref (TREE_OPERAND (exp, 0),
3128 get_identifier ("P_ARRAY"),
3129 NULL_TREE));
3130 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3131 return new;
3132 }
3133
3134 else if (code == NULL_EXPR)
3135 return build1 (NULL_EXPR,
3136 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3137 (TREE_TYPE (TREE_TYPE (exp))))),
3138 TREE_OPERAND (exp, 0));
3139
3140 else if (code == WITH_RECORD_EXPR
3141 && (TREE_OPERAND (exp, 0)
3142 != (new = maybe_unconstrained_array
3143 (TREE_OPERAND (exp, 0)))))
3144 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3145 TREE_OPERAND (exp, 1));
3146
3147 case RECORD_TYPE:
3148 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3149 {
3150 new
3151 = build_component_ref (exp, NULL_TREE,
3152 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3153 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3154 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3155 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3156
3157 return new;
3158 }
3159 break;
3160
3161 default:
3162 break;
3163 }
3164
3165 return exp;
3166 }
3167 \f
3168 /* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3169
3170 tree
3171 unchecked_convert (type, expr)
3172 tree type;
3173 tree expr;
3174 {
3175 tree etype = TREE_TYPE (expr);
3176
3177 /* If the expression is already the right type, we are done. */
3178 if (etype == type)
3179 return expr;
3180
3181 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3182 new one. */
3183 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3184 return build (WITH_RECORD_EXPR, type,
3185 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3186 TREE_OPERAND (expr, 1));
3187
3188 /* If both types are integral, just do a normal conversion.
3189 Likewise for a conversion to an unconstrained array. */
3190 if ((((INTEGRAL_TYPE_P (type)
3191 && ! (TREE_CODE (type) == INTEGER_TYPE
3192 && TYPE_VAX_FLOATING_POINT_P (type)))
3193 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3194 || (TREE_CODE (type) == RECORD_TYPE
3195 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3196 && ((INTEGRAL_TYPE_P (etype)
3197 && ! (TREE_CODE (etype) == INTEGER_TYPE
3198 && TYPE_VAX_FLOATING_POINT_P (etype)))
3199 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3200 || (TREE_CODE (etype) == RECORD_TYPE
3201 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3202 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3203 {
3204 tree rtype = type;
3205
3206 if (TREE_CODE (etype) == INTEGER_TYPE
3207 && TYPE_BIASED_REPRESENTATION_P (etype))
3208 {
3209 tree ntype = copy_type (etype);
3210
3211 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3212 TYPE_MAIN_VARIANT (ntype) = ntype;
3213 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3214 }
3215
3216 if (TREE_CODE (type) == INTEGER_TYPE
3217 && TYPE_BIASED_REPRESENTATION_P (type))
3218 {
3219 rtype = copy_type (type);
3220 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3221 TYPE_MAIN_VARIANT (rtype) = rtype;
3222 }
3223
3224 expr = convert (rtype, expr);
3225 if (type != rtype)
3226 expr = build1 (GNAT_NOP_EXPR, type, expr);
3227 }
3228
3229 /* If we are converting TO an integral type whose precision is not the
3230 same as its size, first unchecked convert to a record that contains
3231 an object of the output type. Then extract the field. */
3232 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3233 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3234 GET_MODE_BITSIZE (TYPE_MODE (type))))
3235 {
3236 tree rec_type = make_node (RECORD_TYPE);
3237 tree field = create_field_decl (get_identifier ("OBJ"), type,
3238 rec_type, 1, 0, 0, 0);
3239
3240 TYPE_FIELDS (rec_type) = field;
3241 layout_type (rec_type);
3242
3243 expr = unchecked_convert (rec_type, expr);
3244 expr = build_component_ref (expr, NULL_TREE, field);
3245 }
3246
3247 /* Similarly for an integral input type whose precision is not equal to its
3248 size. */
3249 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3250 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3251 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3252 {
3253 tree rec_type = make_node (RECORD_TYPE);
3254 tree field
3255 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3256 1, 0, 0, 0);
3257
3258 TYPE_FIELDS (rec_type) = field;
3259 layout_type (rec_type);
3260
3261 expr = build_constructor (rec_type, build_tree_list (field, expr));
3262 expr = unchecked_convert (type, expr);
3263 }
3264
3265 /* We have a special case when we are converting between two
3266 unconstrained array types. In that case, take the address,
3267 convert the fat pointer types, and dereference. */
3268 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3269 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3270 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3271 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3272 build_unary_op (ADDR_EXPR, NULL_TREE,
3273 expr)));
3274 else
3275 {
3276 expr = maybe_unconstrained_array (expr);
3277 etype = TREE_TYPE (expr);
3278 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3279 }
3280
3281 /* If the result is an integral type whose size is not equal to
3282 the size of the underlying machine type, sign- or zero-extend
3283 the result. We need not do this in the case where the input is
3284 an integral type of the same precision and signedness or if the output
3285 is a biased type or if both the input and output are unsigned. */
3286 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3287 && ! (TREE_CODE (type) == INTEGER_TYPE
3288 && TYPE_BIASED_REPRESENTATION_P (type))
3289 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3290 GET_MODE_BITSIZE (TYPE_MODE (type)))
3291 && ! (INTEGRAL_TYPE_P (etype)
3292 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3293 && operand_equal_p (TYPE_RM_SIZE (type),
3294 (TYPE_RM_SIZE (etype) != 0
3295 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3296 0))
3297 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3298 {
3299 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3300 TREE_UNSIGNED (type));
3301 tree shift_expr
3302 = convert (base_type,
3303 size_binop (MINUS_EXPR,
3304 bitsize_int
3305 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3306 TYPE_RM_SIZE (type)));
3307 expr
3308 = convert (type,
3309 build_binary_op (RSHIFT_EXPR, base_type,
3310 build_binary_op (LSHIFT_EXPR, base_type,
3311 convert (base_type, expr),
3312 shift_expr),
3313 shift_expr));
3314 }
3315
3316 /* An unchecked conversion should never raise Constraint_Error. The code
3317 below assumes that GCC's conversion routines overflow the same way that
3318 the underlying hardware does. This is probably true. In the rare case
3319 when it is false, we can rely on the fact that such conversions are
3320 erroneous anyway. */
3321 if (TREE_CODE (expr) == INTEGER_CST)
3322 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3323
3324 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3325 show that the expression is no longer constant. */
3326 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3327 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3328 TREE_CONSTANT (expr) = 0;
3329
3330 return expr;
3331 }
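
/* Illustrative sketch only, kept out of the build: an unchecked conversion
   between two integral types of the same size is just a normal conversion,
   i.e. a change of view over the same bits.  */
#if 0
{
  tree gnu_s32 = gnat_type_for_size (32, 0);
  tree gnu_u32 = gnat_type_for_size (32, 1);
  tree gnu_val = convert (gnu_s32, integer_one_node);
  tree gnu_view = unchecked_convert (gnu_u32, gnu_val);

  /* GNU_VIEW has the same 32 bits as GNU_VAL, seen as unsigned.  */
}
#endif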
3332
3333 #include "gt-ada-utils.h"
3334 #include "gtype-ada.h"