tm.texi (Storage Layout): Remove ROUND_TYPE_SIZE and ROUND_TYPE_SIZE_UNIT.
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * *
10 * Copyright (C) 1992-2003, Free Software Foundation, Inc. *
11 * *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
22 * *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 * *
26 ****************************************************************************/
27
28 #include "config.h"
29 #include "system.h"
30 #include "coretypes.h"
31 #include "tm.h"
32 #include "tree.h"
33 #include "flags.h"
34 #include "defaults.h"
35 #include "toplev.h"
36 #include "output.h"
37 #include "ggc.h"
38 #include "debug.h"
39 #include "convert.h"
40
41 #include "ada.h"
42 #include "types.h"
43 #include "atree.h"
44 #include "elists.h"
45 #include "namet.h"
46 #include "nlists.h"
47 #include "stringt.h"
48 #include "uintp.h"
49 #include "fe.h"
50 #include "sinfo.h"
51 #include "einfo.h"
52 #include "ada-tree.h"
53 #include "gigi.h"
54
55 #ifndef MAX_FIXED_MODE_SIZE
56 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
57 #endif
58
59 #ifndef MAX_BITS_PER_WORD
60 #define MAX_BITS_PER_WORD BITS_PER_WORD
61 #endif
62
63 /* If nonzero, pretend we are allocating at global level. */
64 int force_global;
65
66 /* Tree nodes for the various types and decls we create. */
67 tree gnat_std_decls[(int) ADT_LAST];
68
69 /* Functions to call for each of the possible raise reasons. */
70 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
71
72 /* Associates a GNAT tree node to a GCC tree node. It is used in
73 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
74 of `save_gnu_tree' for more info. */
75 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
76
77 /* This listhead is used to record any global objects that need elaboration.
78 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
79 initial value to assign. */
80
81 static GTY(()) tree pending_elaborations;
82
83 /* This stack allows us to momentarily switch to generating elaboration
84 lists for an inner context. */
85
struct e_stack GTY(()) {
  /* The saved (enclosing) elaboration context.  */
  struct e_stack *next;
  /* The pending-elaborations list for that context (same TREE_LIST form
     as `pending_elaborations').  */
  tree elab_list;
};
90 static GTY(()) struct e_stack *elist_stack;
91
92 /* This variable keeps a table for types for each precision so that we only
93 allocate each of them once. Signed and unsigned types are kept separate.
94
95 Note that these types are only used when fold-const requests something
96 special. Perhaps we should NOT share these types; we'll see how it
97 goes later. */
98 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
99
100 /* Likewise for float types, but record these by mode. */
101 static GTY(()) tree float_types[NUM_MACHINE_MODES];
102
103 /* For each binding contour we allocate a binding_level structure which records
104 the entities defined or declared in that contour. Contours include:
105
106 the global one
107 one for each subprogram definition
108 one for each compound statement (declare block)
109
110 Binding contours are used to create GCC tree BLOCK nodes. */
111
112 struct binding_level GTY(())
113 {
114 /* A chain of ..._DECL nodes for all variables, constants, functions,
115 parameters and type declarations. These ..._DECL nodes are chained
116 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
117 in the reverse of the order supplied to be compatible with the
118 back-end. */
119 tree names;
120 /* For each level (except the global one), a chain of BLOCK nodes for all
121 the levels that were entered and exited one level down from this one. */
122 tree blocks;
123 /* The BLOCK node for this level, if one has been preallocated.
124 If 0, the BLOCK is allocated (if needed) when the level is popped. */
125 tree this_block;
126 /* The binding level containing this one (the enclosing binding level). */
127 struct binding_level *level_chain;
128 };
129
130 /* The binding level currently in effect. */
131 static GTY(()) struct binding_level *current_binding_level;
132
133 /* A chain of binding_level structures awaiting reuse. */
134 static GTY((deletable (""))) struct binding_level *free_binding_level;
135
136 /* The outermost binding level. This binding level is created when the
137 compiler is started and it will exist through the entire compilation. */
138 static struct binding_level *global_binding_level;
139
140 /* Binding level structures are initialized by copying this one. */
141 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
142
struct language_function GTY(())
{
  /* Placeholder member: the Ada front end keeps no per-function language
     data, but C requires the struct to be non-empty.  */
  int unused;
};
147
148 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
149 static tree compute_related_constant PARAMS ((tree, tree));
150 static tree split_plus PARAMS ((tree, tree *));
151 static int value_zerop PARAMS ((tree));
152 static tree float_type_for_size PARAMS ((int, enum machine_mode));
153 static tree convert_to_fat_pointer PARAMS ((tree, tree));
154 static tree convert_to_thin_pointer PARAMS ((tree, tree));
155 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
156 tree));
157 \f
158 /* Initialize the association of GNAT nodes to GCC trees. */
159
160 void
161 init_gnat_to_gnu ()
162 {
163 Node_Id gnat_node;
164
165 associate_gnat_to_gnu = (tree *) ggc_alloc (max_gnat_nodes * sizeof (tree));
166
167 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
168 associate_gnat_to_gnu[gnat_node] = NULL_TREE;
169
170 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
171 }
172
173 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
174 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
175 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
176
177 If GNU_DECL is zero, a previous association is to be reset. */
178
179 void
180 save_gnu_tree (gnat_entity, gnu_decl, no_check)
181 Entity_Id gnat_entity;
182 tree gnu_decl;
183 int no_check;
184 {
185 if (gnu_decl
186 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
187 || (! no_check && ! DECL_P (gnu_decl))))
188 gigi_abort (401);
189
190 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
191 }
192
193 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
194 Return the ..._DECL node that was associated with it. If there is no tree
195 node associated with GNAT_ENTITY, abort.
196
197 In some cases, such as delayed elaboration or expressions that need to
198 be elaborated only once, GNAT_ENTITY is really not an entity. */
199
200 tree
201 get_gnu_tree (gnat_entity)
202 Entity_Id gnat_entity;
203 {
204 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
205 gigi_abort (402);
206
207 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
208 }
209
210 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
211
212 int
213 present_gnu_tree (gnat_entity)
214 Entity_Id gnat_entity;
215 {
216 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
217 }
218
219 \f
220 /* Return non-zero if we are currently in the global binding level. */
221
222 int
223 global_bindings_p ()
224 {
225 return (force_global != 0 || current_binding_level == global_binding_level
226 ? -1 : 0);
227 }
228
229 /* Return the list of declarations in the current level. Note that this list
230 is in reverse order (it has to be so for back-end compatibility). */
231
232 tree
233 getdecls ()
234 {
235 return current_binding_level->names;
236 }
237
238 /* Nonzero if the current level needs to have a BLOCK made. */
239
240 int
241 kept_level_p ()
242 {
243 return (current_binding_level->names != 0);
244 }
245
246 /* Enter a new binding level. The input parameter is ignored, but has to be
247 specified for back-end compatibility. */
248
249 void
250 pushlevel (ignore)
251 int ignore ATTRIBUTE_UNUSED;
252 {
253 struct binding_level *newlevel = NULL;
254
255 /* Reuse a struct for this binding level, if there is one. */
256 if (free_binding_level)
257 {
258 newlevel = free_binding_level;
259 free_binding_level = free_binding_level->level_chain;
260 }
261 else
262 newlevel
263 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
264
265 *newlevel = clear_binding_level;
266
267 /* Add this level to the front of the chain (stack) of levels that are
268 active. */
269 newlevel->level_chain = current_binding_level;
270 current_binding_level = newlevel;
271 }
272
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so
   create a "block" (a BLOCK node) for the level
   to record its declarations and subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.  */

tree
poplevel (keep, reverse, functionbody)
     int keep;
     int reverse;
     int functionbody;
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed
     for the binding level that we are about to exit and which is returned
     by this routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL node, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
	&& ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
	&& DECL_INITIAL (decl_node) != 0)
      {
	push_function_context ();
	output_inline_function (decl_node);
	pop_function_context ();
      }

  /* Use the preallocated BLOCK from set_block if there is one; otherwise
     make a BLOCK only when this level declared something or is a function
     body.  */
  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock its enclosing scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
	 don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
	{
	  if (TREE_USED (subblock_node))
	    TREE_USED (DECL_NAME (subblock_node)) = 1;
	  if (TREE_ADDRESSABLE (subblock_node))
	    TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
	}

  {
    /* Pop the current level, and free the structure for reuse.  Note that
       from here on `current_binding_level' is the enclosing level.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
	 in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
	 leave them in the BLOCK because they are found in the FUNCTION_DECL
	 instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      /* Chain the finished BLOCK onto the enclosing level unless it was
	 preallocated, in which case it is already recorded there.  */
      if (!block_previously_created)
	current_binding_level->blocks
	  = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
392 \f
393 /* Insert BLOCK at the end of the list of subblocks of the
394 current binding level. This is used when a BIND_EXPR is expanded,
395 to handle the BLOCK node inside the BIND_EXPR. */
396
397 void
398 insert_block (block)
399 tree block;
400 {
401 TREE_USED (block) = 1;
402 current_binding_level->blocks
403 = chainon (current_binding_level->blocks, block);
404 }
405
406 /* Set the BLOCK node for the innermost scope
407 (the one we are currently in). */
408
409 void
410 set_block (block)
411 tree block;
412 {
413 current_binding_level->this_block = block;
414 current_binding_level->names = chainon (current_binding_level->names,
415 BLOCK_VARS (block));
416 current_binding_level->blocks = chainon (current_binding_level->blocks,
417 BLOCK_SUBBLOCKS (block));
418 }
419
420 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
421 Returns the ..._DECL node. */
422
423 tree
424 pushdecl (decl)
425 tree decl;
426 {
427 struct binding_level *b;
428
429 /* If at top level, there is no context. But PARM_DECLs always go in the
430 level of its function. */
431 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
432 {
433 b = global_binding_level;
434 DECL_CONTEXT (decl) = 0;
435 }
436 else
437 {
438 b = current_binding_level;
439 DECL_CONTEXT (decl) = current_function_decl;
440 }
441
442 /* Put the declaration on the list. The list of declarations is in reverse
443 order. The list will be reversed later if necessary. This needs to be
444 this way for compatibility with the back-end.
445
446 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
447 will cause trouble with the debugger and aren't needed anyway. */
448 if (TREE_CODE (decl) != TYPE_DECL
449 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
450 {
451 TREE_CHAIN (decl) = b->names;
452 b->names = decl;
453 }
454
455 /* For the declaration of a type, set its name if it either is not already
456 set, was set to an IDENTIFIER_NODE, indicating an internal name,
457 or if the previous type name was not derived from a source name.
458 We'd rather have the type named with a real name and all the pointer
459 types to the same object have the same POINTER_TYPE node. Code in this
460 function in c-decl.c makes a copy of the type node here, but that may
461 cause us trouble with incomplete types, so let's not try it (at least
462 for now). */
463
464 if (TREE_CODE (decl) == TYPE_DECL
465 && DECL_NAME (decl) != 0
466 && (TYPE_NAME (TREE_TYPE (decl)) == 0
467 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
468 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
469 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
470 && ! DECL_ARTIFICIAL (decl))))
471 TYPE_NAME (TREE_TYPE (decl)) = decl;
472
473 return decl;
474 }
475 \f
476 /* Do little here. Set up the standard declarations later after the
477 front end has been run. */
478
void
gnat_init_decl_processing ()
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of ptr_mode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
  build_common_tree_nodes_2 (0);

  /* Record a named decl for sizetype so it appears in debug output.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}
511
512 /* Create the predefined scalar types such as `integer_type_node' needed
513 in the gcc back-end and initialize the global binding level. */
514
void
init_gigi_decls (long_long_float_type, exception_type)
     tree long_long_float_type, exception_type;
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
			    longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
			unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
			    void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions. */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */

  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
     NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
     NULL_TREE,
     build_function_type (void_type_node,
			  tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
     NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_gnat_exception"),
     NULL_TREE,
     build_function_type (build_pointer_type (except_type_node), NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions. */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      /* One decl serves every raise reason in this mode.  */
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	/* 14 chars of "__gnat_rcheck_" + 2 digits + NUL = 17.  */
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");
}
683 \f
684 /* This function is called indirectly from toplev.c to handle incomplete
685 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
686 compile_file in toplev.c makes an indirect call through the function pointer
687 incomplete_decl_finalize_hook which is initialized to this routine in
688 init_decl_processing. */
689
void
gnat_finish_incomplete_decl (dont_care)
     tree dont_care ATTRIBUTE_UNUSED;
{
  /* Gigi never leaves VAR_DECLs with a zero DECL_SIZE for toplev.c to
     finalize, so reaching this hook is an internal error.  */
  gigi_abort (405);
}
696 \f
697 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
698 nodes (FIELDLIST), finish constructing the record or union type.
699 If HAS_REP is nonzero, this record has a rep clause; don't call
700 layout_type but merely set the size and alignment ourselves.
701 If DEFER_DEBUG is nonzero, do not call the debugging routines
702 on this type; it will be done later. */
703
704 void
705 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
706 tree record_type;
707 tree fieldlist;
708 int has_rep;
709 int defer_debug;
710 {
711 enum tree_code code = TREE_CODE (record_type);
712 tree ada_size = bitsize_zero_node;
713 tree size = bitsize_zero_node;
714 tree size_unit = size_zero_node;
715 int var_size = 0;
716 tree field;
717
718 TYPE_FIELDS (record_type) = fieldlist;
719
720 if (TYPE_NAME (record_type) != 0
721 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
722 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
723 else
724 TYPE_STUB_DECL (record_type)
725 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
726 record_type));
727
728 /* We don't need both the typedef name and the record name output in
729 the debugging information, since they are the same. */
730 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
731
732 /* Globally initialize the record first. If this is a rep'ed record,
733 that just means some initializations; otherwise, layout the record. */
734
735 if (has_rep)
736 {
737 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
738 TYPE_MODE (record_type) = BLKmode;
739 if (TYPE_SIZE (record_type) == 0)
740 {
741 TYPE_SIZE (record_type) = bitsize_zero_node;
742 TYPE_SIZE_UNIT (record_type) = size_zero_node;
743 }
744 }
745 else
746 {
747 /* Ensure there isn't a size already set. There can be in an error
748 case where there is a rep clause but all fields have errors and
749 no longer have a position. */
750 TYPE_SIZE (record_type) = 0;
751 layout_type (record_type);
752 }
753
754 /* At this point, the position and size of each field is known. It was
755 either set before entry by a rep clause, or by laying out the type
756 above. We now make a pass through the fields (in reverse order for
757 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
758 (for rep'ed records that are not padding types); and the mode (for
759 rep'ed records). */
760
761 if (code == QUAL_UNION_TYPE)
762 fieldlist = nreverse (fieldlist);
763
764 for (field = fieldlist; field; field = TREE_CHAIN (field))
765 {
766 tree type = TREE_TYPE (field);
767 tree this_size = DECL_SIZE (field);
768 tree this_size_unit = DECL_SIZE_UNIT (field);
769 tree this_ada_size = DECL_SIZE (field);
770
771 /* We need to make an XVE/XVU record if any field has variable size,
772 whether or not the record does. For example, if we have an union,
773 it may be that all fields, rounded up to the alignment, have the
774 same size, in which case we'll use that size. But the debug
775 output routines (except Dwarf2) won't be able to output the fields,
776 so we need to make the special record. */
777 if (TREE_CODE (this_size) != INTEGER_CST)
778 var_size = 1;
779
780 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
781 || TREE_CODE (type) == QUAL_UNION_TYPE)
782 && ! TYPE_IS_FAT_POINTER_P (type)
783 && ! TYPE_CONTAINS_TEMPLATE_P (type)
784 && TYPE_ADA_SIZE (type) != 0)
785 this_ada_size = TYPE_ADA_SIZE (type);
786
787 if (has_rep && ! DECL_BIT_FIELD (field))
788 TYPE_ALIGN (record_type)
789 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
790
791 switch (code)
792 {
793 case UNION_TYPE:
794 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
795 size = size_binop (MAX_EXPR, size, this_size);
796 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
797 break;
798
799 case QUAL_UNION_TYPE:
800 ada_size
801 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
802 this_ada_size, ada_size));
803 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
804 this_size, size));
805 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
806 this_size_unit, size_unit));
807 break;
808
809 case RECORD_TYPE:
810 /* Since we know here that all fields are sorted in order of
811 increasing bit position, the size of the record is one
812 higher than the ending bit of the last field processed
813 unless we have a rep clause, since in that case we might
814 have a field outside a QUAL_UNION_TYPE that has a higher ending
815 position. So use a MAX in that case. Also, if this field is a
816 QUAL_UNION_TYPE, we need to take into account the previous size in
817 the case of empty variants. */
818 ada_size
819 = merge_sizes (ada_size, bit_position (field), this_ada_size,
820 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
821 size = merge_sizes (size, bit_position (field), this_size,
822 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
823 size_unit
824 = merge_sizes (size_unit, byte_position (field), this_size_unit,
825 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
826 break;
827
828 default:
829 abort ();
830 }
831 }
832
833 if (code == QUAL_UNION_TYPE)
834 nreverse (fieldlist);
835
836 /* If this is a padding record, we never want to make the size smaller than
837 what was specified in it, if any. */
838 if (TREE_CODE (record_type) == RECORD_TYPE
839 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
840 {
841 size = TYPE_SIZE (record_type);
842 size_unit = TYPE_SIZE_UNIT (record_type);
843 }
844
845 /* Now set any of the values we've just computed that apply. */
846 if (! TYPE_IS_FAT_POINTER_P (record_type)
847 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
848 SET_TYPE_ADA_SIZE (record_type, ada_size);
849
850 size = round_up (size, TYPE_ALIGN (record_type));
851 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
852
853 if (has_rep
854 && ! (TREE_CODE (record_type) == RECORD_TYPE
855 && TYPE_IS_PADDING_P (record_type)
856 && TREE_CODE (size) != INTEGER_CST
857 && contains_placeholder_p (size)))
858 {
859 TYPE_SIZE (record_type) = size;
860 TYPE_SIZE_UNIT (record_type) = size_unit;
861 }
862
863 if (has_rep)
864 compute_record_mode (record_type);
865
866 if (! defer_debug)
867 {
868 /* If this record is of variable size, rename it so that the
869 debugger knows it is and make a new, parallel, record
870 that tells the debugger how the record is laid out. See
871 exp_dbug.ads. */
872 if (var_size)
873 {
874 tree new_record_type
875 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
876 ? UNION_TYPE : TREE_CODE (record_type));
877 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
878 tree new_id
879 = concat_id_with_name (orig_id,
880 TREE_CODE (record_type) == QUAL_UNION_TYPE
881 ? "XVU" : "XVE");
882 tree last_pos = bitsize_zero_node;
883 tree old_field;
884
885 TYPE_NAME (new_record_type) = new_id;
886 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
887 TYPE_STUB_DECL (new_record_type)
888 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
889 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
890 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
891 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
892 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
893
894 /* Now scan all the fields, replacing each field with a new
895 field corresponding to the new encoding. */
896 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
897 old_field = TREE_CHAIN (old_field))
898 {
899 tree field_type = TREE_TYPE (old_field);
900 tree field_name = DECL_NAME (old_field);
901 tree new_field;
902 tree curpos = bit_position (old_field);
903 int var = 0;
904 unsigned int align = 0;
905 tree pos;
906
907 /* See how the position was modified from the last position.
908
909 There are two basic cases we support: a value was added
910 to the last position or the last position was rounded to
911 a boundary and they something was added. Check for the
912 first case first. If not, see if there is any evidence
913 of rounding. If so, round the last position and try
914 again.
915
916 If this is a union, the position can be taken as zero. */
917
918 if (TREE_CODE (new_record_type) == UNION_TYPE)
919 pos = bitsize_zero_node, align = 0;
920 else
921 pos = compute_related_constant (curpos, last_pos);
922
923 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
924 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
925 {
926 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
927 pos = compute_related_constant (curpos,
928 round_up (last_pos, align));
929 }
930 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
931 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
932 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
933 && host_integerp (TREE_OPERAND
934 (TREE_OPERAND (curpos, 0), 1),
935 1))
936 {
937 align
938 = tree_low_cst
939 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
940 pos = compute_related_constant (curpos,
941 round_up (last_pos, align));
942 }
943
944 /* If we can't compute a position, set it to zero.
945
946 ??? We really should abort here, but it's too much work
947 to get this correct for all cases. */
948
949 if (pos == 0)
950 pos = bitsize_zero_node;
951
952 /* See if this type is variable-size and make a new type
953 and indicate the indirection if so. */
954 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
955 {
956 field_type = build_pointer_type (field_type);
957 var = 1;
958 }
959
960 /* Make a new field name, if necessary. */
961 if (var || align != 0)
962 {
963 char suffix[6];
964
965 if (align != 0)
966 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
967 align / BITS_PER_UNIT);
968 else
969 strcpy (suffix, "XVL");
970
971 field_name = concat_id_with_name (field_name, suffix);
972 }
973
974 new_field = create_field_decl (field_name, field_type,
975 new_record_type, 0,
976 DECL_SIZE (old_field), pos, 0);
977 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
978 TYPE_FIELDS (new_record_type) = new_field;
979
980 /* If old_field is a QUAL_UNION_TYPE, take its size as being
981 zero. The only time it's not the last field of the record
982 is when there are other components at fixed positions after
983 it (meaning there was a rep clause for every field) and we
984 want to be able to encode them. */
985 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
986 (TREE_CODE (TREE_TYPE (old_field))
987 == QUAL_UNION_TYPE)
988 ? bitsize_zero_node
989 : DECL_SIZE (old_field));
990 }
991
992 TYPE_FIELDS (new_record_type)
993 = nreverse (TYPE_FIELDS (new_record_type));
994
995 rest_of_type_compilation (new_record_type, global_bindings_p ());
996 }
997
998 rest_of_type_compilation (record_type, global_bindings_p ());
999 }
1000 }
1001
1002 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1003 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1004 if this represents a QUAL_UNION_TYPE in which case we must look for
1005 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1006 is nonzero, we must take the MAX of the end position of this field
1007 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1008
1009 We return an expression for the size. */
1010
1011 static tree
1012 merge_sizes (last_size, first_bit, size, special, has_rep)
1013 tree last_size;
1014 tree first_bit, size;
1015 int special;
1016 int has_rep;
1017 {
1018 tree type = TREE_TYPE (last_size);
1019
1020 if (! special || TREE_CODE (size) != COND_EXPR)
1021 {
1022 tree new = size_binop (PLUS_EXPR, first_bit, size);
1023
1024 if (has_rep)
1025 new = size_binop (MAX_EXPR, last_size, new);
1026
1027 return new;
1028 }
1029
1030 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1031 integer_zerop (TREE_OPERAND (size, 1))
1032 ? last_size : merge_sizes (last_size, first_bit,
1033 TREE_OPERAND (size, 1),
1034 1, has_rep),
1035 integer_zerop (TREE_OPERAND (size, 2))
1036 ? last_size : merge_sizes (last_size, first_bit,
1037 TREE_OPERAND (size, 2),
1038 1, has_rep)));
1039 }
1040
1041 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1042 related by the addition of a constant. Return that constant if so. */
1043
1044 static tree
1045 compute_related_constant (op0, op1)
1046 tree op0, op1;
1047 {
1048 tree op0_var, op1_var;
1049 tree op0_con = split_plus (op0, &op0_var);
1050 tree op1_con = split_plus (op1, &op1_var);
1051 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1052
1053 if (operand_equal_p (op0_var, op1_var, 0))
1054 return result;
1055 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1056 return result;
1057 else
1058 return 0;
1059 }
1060
1061 /* Utility function of above to split a tree OP which may be a sum, into a
1062 constant part, which is returned, and a variable part, which is stored
1063 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1064 sizetype. */
1065
1066 static tree
1067 split_plus (in, pvar)
1068 tree in;
1069 tree *pvar;
1070 {
1071 tree result = bitsize_zero_node;
1072
1073 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1074 in = TREE_OPERAND (in, 0);
1075
1076 *pvar = in;
1077 if (TREE_CODE (in) == INTEGER_CST)
1078 {
1079 *pvar = bitsize_zero_node;
1080 return in;
1081 }
1082 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1083 {
1084 tree lhs_var, rhs_var;
1085 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1086 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1087
1088 result = size_binop (PLUS_EXPR, result, lhs_con);
1089 result = size_binop (TREE_CODE (in), result, rhs_con);
1090
1091 if (lhs_var == TREE_OPERAND (in, 0)
1092 && rhs_var == TREE_OPERAND (in, 1))
1093 return bitsize_zero_node;
1094
1095 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1096 return result;
1097 }
1098 else
1099 return bitsize_zero_node;
1100 }
1101 \f
1102 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1103 subprogram. If it is void_type_node, then we are dealing with a procedure,
1104 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1105 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1106 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1107 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1108 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1109 RETURNS_WITH_DSP is nonzero if the function is to return with a
1110 depressed stack pointer. */
1111
1112 tree
1113 create_subprog_type (return_type, param_decl_list, cico_list,
1114 returns_unconstrained, returns_by_ref, returns_with_dsp)
1115 tree return_type;
1116 tree param_decl_list;
1117 tree cico_list;
1118 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1119 {
1120 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1121 the subprogram formal parameters. This list is generated by traversing the
1122 input list of PARM_DECL nodes. */
1123 tree param_type_list = NULL;
1124 tree param_decl;
1125 tree type;
1126
1127 for (param_decl = param_decl_list; param_decl;
1128 param_decl = TREE_CHAIN (param_decl))
1129 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1130 param_type_list);
1131
1132 /* The list of the function parameter types has to be terminated by the void
1133 type to signal to the back-end that we are not dealing with a variable
1134 parameter subprogram, but that the subprogram has a fixed number of
1135 parameters. */
1136 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1137
1138 /* The list of argument types has been created in reverse
1139 so nreverse it. */
1140 param_type_list = nreverse (param_type_list);
1141
1142 type = build_function_type (return_type, param_type_list);
1143
1144 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1145 or the new type should, make a copy of TYPE. Likewise for
1146 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1147 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1148 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1149 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1150 type = copy_type (type);
1151
1152 SET_TYPE_CI_CO_LIST (type, cico_list);
1153 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1154 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1155 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1156 return type;
1157 }
1158 \f
1159 /* Return a copy of TYPE but safe to modify in any way. */
1160
1161 tree
1162 copy_type (type)
1163 tree type;
1164 {
1165 tree new = copy_node (type);
1166
1167 /* copy_node clears this field instead of copying it, because it is
1168 aliased with TREE_CHAIN. */
1169 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1170
1171 TYPE_POINTER_TO (new) = 0;
1172 TYPE_REFERENCE_TO (new) = 0;
1173 TYPE_MAIN_VARIANT (new) = new;
1174 TYPE_NEXT_VARIANT (new) = 0;
1175
1176 return new;
1177 }
1178 \f
1179 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1180 TYPE_INDEX_TYPE is INDEX. */
1181
1182 tree
1183 create_index_type (min, max, index)
1184 tree min, max;
1185 tree index;
1186 {
1187 /* First build a type for the desired range. */
1188 tree type = build_index_2_type (min, max);
1189
1190 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1191 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1192 is set, but not to INDEX, make a copy of this type with the requested
1193 index type. Note that we have no way of sharing these types, but that's
1194 only a small hole. */
1195 if (TYPE_INDEX_TYPE (type) == index)
1196 return type;
1197 else if (TYPE_INDEX_TYPE (type) != 0)
1198 type = copy_type (type);
1199
1200 SET_TYPE_INDEX_TYPE (type, index);
1201 return type;
1202 }
1203 \f
1204 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1205 string) and TYPE is a ..._TYPE node giving its data type.
1206 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1207 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1208 information about this type. */
1209
1210 tree
1211 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1212 tree type_name;
1213 tree type;
1214 struct attrib *attr_list;
1215 int artificial_p;
1216 int debug_info_p;
1217 {
1218 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1219 enum tree_code code = TREE_CODE (type);
1220
1221 DECL_ARTIFICIAL (type_decl) = artificial_p;
1222 pushdecl (type_decl);
1223 process_attributes (type_decl, attr_list);
1224
1225 /* Pass type declaration information to the debugger unless this is an
1226 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1227 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1228 a dummy type, which will be completed later, or a type for which
1229 debugging information was not requested. */
1230 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1231 || ! debug_info_p)
1232 DECL_IGNORED_P (type_decl) = 1;
1233 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1234 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1235 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1236 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1237
1238 return type_decl;
1239 }
1240
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   ATTR_LIST is the list of attributes to attach to the variable.  */

tree
create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
		 extern_flag, static_flag, attr_list)
     tree var_name;
     tree asm_name;
     tree type;
     tree var_init;
     int const_flag;
     int public_flag;
     int extern_flag;
     int static_flag;
     struct attrib *attr_list;
{
  /* Nonzero when VAR_INIT is a constant valid in this context: at global
     scope (or when STATIC_FLAG forces static storage) it must be a
     link-time constant; inside a subprogram, TREE_CONSTANT suffices.  The
     type of the initializer must also match TYPE, since a mismatched
     initializer is handled via assignment below.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small"  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* When set, the initialization is performed by an explicit assignment
     statement at the end of this function instead of via DECL_INITIAL.  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* A non-constant global initializer is deferred to the elaboration
     routine.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  /* A type-mismatched or non-constant-static initializer is emitted as an
     assignment statement below.  */
  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1364 \f
/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is
   its type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (field_name, field_type, record_type, packed, size, pos,
		   addressable)
     tree field_name;
     tree field_type;
     tree record_type;
     int packed;
     tree size, pos;
     int addressable;
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     If it is a padding type where the inner field is of variable size, it
     must be at its natural alignment.  Just handle the packed case here; we
     will disallow non-aligned rep clauses elsewhere.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl)
      = ((TREE_CODE (field_type) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (field_type)
	  && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
	 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);

  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed)
    {
      /* PACKED == 1 means minimally packed: use the RM size when it
	 differs from the type's natural size.  */
      if (packed == 1 && ! operand_equal_p (rm_size (field_type),
					    TYPE_SIZE (field_type), 0))
	size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (size != 0 && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* With neither packing nor an explicit position, the bitfield may
	 still keep the natural alignment, capped by the record's.  */
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  /* Bump the alignment up to the minimum consistent with how the field is
     laid out: 1 for a bitfield, a byte for a packed non-BLKmode field,
     otherwise the type's own alignment.  */
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      /* Split POS into the byte offset and the within-byte bit offset
	 that the field machinery stores separately.  */
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);

  return field_decl;
}
1488
1489 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1490 effects, has the value of zero. */
1491
1492 static int
1493 value_zerop (exp)
1494 tree exp;
1495 {
1496 if (TREE_CODE (exp) == COMPOUND_EXPR)
1497 return value_zerop (TREE_OPERAND (exp, 1));
1498
1499 return integer_zerop (exp);
1500 }
1501 \f
1502 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1503 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1504 readonly (either an IN parameter or an address of a pass-by-ref
1505 parameter). */
1506
1507 tree
1508 create_param_decl (param_name, param_type, readonly)
1509 tree param_name;
1510 tree param_type;
1511 int readonly;
1512 {
1513 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1514
1515 DECL_ARG_TYPE (param_decl) = param_type;
1516 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1517 TREE_READONLY (param_decl) = readonly;
1518 return param_decl;
1519 }
1520 \f
/* Given a DECL and ATTR_LIST, process the listed attributes, applying
   each one's effect to DECL in list order.  */

void
process_attributes (decl, attr_list)
     tree decl;
     struct attrib *attr_list;
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand a target-specific attribute off to the generic
	   attribute machinery.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* Emit DECL as an alias for the symbol named by the attribute.  */
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	/* Weak symbols are only possible on targets that support them.  */
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	/* Place DECL in the named section when the target can name
	   sections; otherwise warn at the attribute's source location.  */
#ifdef ASM_OUTPUT_SECTION_NAME
	DECL_SECTION_NAME (decl)
	  = build_string (IDENTIFIER_LENGTH (attr_list->name),
			  IDENTIFIER_POINTER (attr_list->name));
	DECL_COMMON (decl) = 0;
#else
	post_error ("?section attributes are not supported for this target",
		    attr_list->error_point);
#endif
	break;
      }
}
1563 \f
1564 /* Add some pending elaborations on the list. */
1565
1566 void
1567 add_pending_elaborations (var_decl, var_init)
1568 tree var_decl;
1569 tree var_init;
1570 {
1571 if (var_init != 0)
1572 Check_Elaboration_Code_Allowed (error_gnat_node);
1573
1574 pending_elaborations
1575 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1576 }
1577
1578 /* Obtain any pending elaborations and clear the old list. */
1579
1580 tree
1581 get_pending_elaborations ()
1582 {
1583 /* Each thing added to the list went on the end; we want it on the
1584 beginning. */
1585 tree result = TREE_CHAIN (pending_elaborations);
1586
1587 TREE_CHAIN (pending_elaborations) = 0;
1588 return result;
1589 }
1590
1591 /* Return nonzero if there are pending elaborations. */
1592
1593 int
1594 pending_elaborations_p ()
1595 {
1596 return TREE_CHAIN (pending_elaborations) != 0;
1597 }
1598
1599 /* Save a copy of the current pending elaboration list and make a new
1600 one. */
1601
1602 void
1603 push_pending_elaborations ()
1604 {
1605 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1606
1607 p->next = elist_stack;
1608 p->elab_list = pending_elaborations;
1609 elist_stack = p;
1610 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1611 }
1612
1613 /* Pop the stack of pending elaborations. */
1614
1615 void
1616 pop_pending_elaborations ()
1617 {
1618 struct e_stack *p = elist_stack;
1619
1620 pending_elaborations = p->elab_list;
1621 elist_stack = p->next;
1622 }
1623
1624 /* Return the current position in pending_elaborations so we can insert
1625 elaborations after that point. */
1626
1627 tree
1628 get_elaboration_location ()
1629 {
1630 return tree_last (pending_elaborations);
1631 }
1632
/* Insert the current elaborations after ELAB, which is in some elaboration
   list.  Afterwards the pending list is left empty.  */

void
insert_elaboration_list (elab)
     tree elab;
{
  /* Remember ELAB's old successor so the spliced-in entries can be
     reconnected to it.  */
  tree next = TREE_CHAIN (elab);

  /* Nothing to do when the pending list has no real entries.  */
  if (TREE_CHAIN (pending_elaborations))
    {
      /* Splice the pending entries (everything after the placeholder
	 head of PENDING_ELABORATIONS) in between ELAB and NEXT, then
	 detach them from the pending list.  */
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
}
1649
1650 /* Returns a LABEL_DECL node for LABEL_NAME. */
1651
1652 tree
1653 create_label_decl (label_name)
1654 tree label_name;
1655 {
1656 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1657
1658 DECL_CONTEXT (label_decl) = current_function_decl;
1659 DECL_MODE (label_decl) = VOIDmode;
1660 DECL_SOURCE_LOCATION (label_decl) = input_location;
1661
1662 return label_decl;
1663 }
1664 \f
1665 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1666 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1667 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1668 PARM_DECL nodes chained through the TREE_CHAIN field).
1669
1670 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1671 appropriate fields in the FUNCTION_DECL. */
1672
1673 tree
1674 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1675 inline_flag, public_flag, extern_flag, attr_list)
1676 tree subprog_name;
1677 tree asm_name;
1678 tree subprog_type;
1679 tree param_decl_list;
1680 int inline_flag;
1681 int public_flag;
1682 int extern_flag;
1683 struct attrib *attr_list;
1684 {
1685 tree return_type = TREE_TYPE (subprog_type);
1686 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1687
1688 /* If this is a function nested inside an inlined external function, it
1689 means we aren't going to compile the outer function unless it is
1690 actually inlined, so do the same for us. */
1691 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1692 && DECL_EXTERNAL (current_function_decl))
1693 extern_flag = 1;
1694
1695 DECL_EXTERNAL (subprog_decl) = extern_flag;
1696 TREE_PUBLIC (subprog_decl) = public_flag;
1697 DECL_INLINE (subprog_decl) = inline_flag;
1698 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1699 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1700 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1701 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1702 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1703
1704 if (asm_name != 0)
1705 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1706
1707 process_attributes (subprog_decl, attr_list);
1708
1709 /* Add this decl to the current binding level. */
1710 subprog_decl = pushdecl (subprog_decl);
1711
1712 /* Output the assembler code and/or RTL for the declaration. */
1713 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1714
1715 return subprog_decl;
1716 }
1717 \f
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  Incremented by begin_subprog_body and
   decremented by end_subprog_body below.  */

static int function_nesting_depth;
1723
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (subprog_decl)
     tree subprog_decl;
{
  tree param_decl_list;
  tree param_decl;
  tree next_param;

  /* Only use the back end's function context machinery when this body is
     nested inside another function being compiled.  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
     subprogram body) so that they can be recognized as local variables in the
     subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecl'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  /* Each PARM_DECL is unchained before being pushed so pushdecl sees a
     single node at a time.  */
  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes.  They appear in the right order.  */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  /* Prepare the back end for emitting RTL for this function's body.  */
  init_function_start (subprog_decl, input_filename, input_line);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
1787
/* Finish the definition of the current subprogram (started with
   begin_subprog_body above) and compile it all the way to assembler
   language output.  */

void
end_subprog_body ()
{
  tree decl;
  tree cico_list;

  /* Close the function's outermost binding level and connect the
     resulting BLOCK to the function.  */
  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end (input_filename, input_line, 0);

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  rest_of_compilation (current_function_decl);

  if (function_nesting_depth > 1)
    ggc_pop_context ();

#if 0
  /* If we're sure this function is defined in this file then mark it
     as such */
  if (TREE_ASM_WRITTEN (current_function_decl))
    mark_fn_defined_in_this_file (current_function_decl);
#endif

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway.  */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  /* DECL_SAVED_INSNS is only set when the function was saved for
     inlining; otherwise its RTL can be discarded now.  */
  if (DECL_SAVED_INSNS (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory.  */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value.  */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it.  */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1862 \f
1863 /* Return a definition for a builtin function named NAME and whose data type
1864 is TYPE. TYPE should be a function type with argument types.
1865 FUNCTION_CODE tells later passes how to compile calls to this function.
1866 See tree.h for its possible values.
1867
1868 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1869 the name to be called if we can't opencode the function. If
1870 ATTRS is nonzero, use that for the function attribute list. */
1871
1872 tree
1873 builtin_function (name, type, function_code, class, library_name, attrs)
1874 const char *name;
1875 tree type;
1876 int function_code;
1877 enum built_in_class class;
1878 const char *library_name;
1879 tree attrs;
1880 {
1881 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1882
1883 DECL_EXTERNAL (decl) = 1;
1884 TREE_PUBLIC (decl) = 1;
1885 if (library_name)
1886 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1887
1888 pushdecl (decl);
1889 DECL_BUILT_IN_CLASS (decl) = class;
1890 DECL_FUNCTION_CODE (decl) = function_code;
1891 if (attrs)
1892 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1893 return decl;
1894 }
1895
1896 /* Return an integer type with the number of bits of precision given by
1897 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1898 it is a signed type. */
1899
1900 tree
1901 gnat_type_for_size (precision, unsignedp)
1902 unsigned precision;
1903 int unsignedp;
1904 {
1905 tree t;
1906 char type_name[20];
1907
1908 if (precision <= 2 * MAX_BITS_PER_WORD
1909 && signed_and_unsigned_types[precision][unsignedp] != 0)
1910 return signed_and_unsigned_types[precision][unsignedp];
1911
1912 if (unsignedp)
1913 t = make_unsigned_type (precision);
1914 else
1915 t = make_signed_type (precision);
1916
1917 if (precision <= 2 * MAX_BITS_PER_WORD)
1918 signed_and_unsigned_types[precision][unsignedp] = t;
1919
1920 if (TYPE_NAME (t) == 0)
1921 {
1922 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1923 TYPE_NAME (t) = get_identifier (type_name);
1924 }
1925
1926 return t;
1927 }
1928
1929 /* Likewise for floating-point types. */
1930
1931 static tree
1932 float_type_for_size (precision, mode)
1933 int precision;
1934 enum machine_mode mode;
1935 {
1936 tree t;
1937 char type_name[20];
1938
1939 if (float_types[(int) mode] != 0)
1940 return float_types[(int) mode];
1941
1942 float_types[(int) mode] = t = make_node (REAL_TYPE);
1943 TYPE_PRECISION (t) = precision;
1944 layout_type (t);
1945
1946 if (TYPE_MODE (t) != mode)
1947 gigi_abort (414);
1948
1949 if (TYPE_NAME (t) == 0)
1950 {
1951 sprintf (type_name, "FLOAT_%d", precision);
1952 TYPE_NAME (t) = get_identifier (type_name);
1953 }
1954
1955 return t;
1956 }
1957
1958 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1959 an unsigned type; otherwise a signed type is returned. */
1960
1961 tree
1962 gnat_type_for_mode (mode, unsignedp)
1963 enum machine_mode mode;
1964 int unsignedp;
1965 {
1966 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1967 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
1968 else
1969 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1970 }
1971
1972 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1973
1974 tree
1975 gnat_unsigned_type (type_node)
1976 tree type_node;
1977 {
1978 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1979
1980 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1981 {
1982 type = copy_node (type);
1983 TREE_TYPE (type) = type_node;
1984 }
1985 else if (TREE_TYPE (type_node) != 0
1986 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1987 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1988 {
1989 type = copy_node (type);
1990 TREE_TYPE (type) = TREE_TYPE (type_node);
1991 }
1992
1993 return type;
1994 }
1995
1996 /* Return the signed version of a TYPE_NODE, a scalar type. */
1997
1998 tree
1999 gnat_signed_type (type_node)
2000 tree type_node;
2001 {
2002 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2003
2004 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2005 {
2006 type = copy_node (type);
2007 TREE_TYPE (type) = type_node;
2008 }
2009 else if (TREE_TYPE (type_node) != 0
2010 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2011 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2012 {
2013 type = copy_node (type);
2014 TREE_TYPE (type) = TREE_TYPE (type_node);
2015 }
2016
2017 return type;
2018 }
2019
2020 /* Return a type the same as TYPE except unsigned or signed according to
2021 UNSIGNEDP. */
2022
2023 tree
2024 gnat_signed_or_unsigned_type (unsignedp, type)
2025 int unsignedp;
2026 tree type;
2027 {
2028 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2029 return type;
2030 else
2031 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2032 }
2033 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.  */

tree
max_size (exp, max_p)
     tree exp;
     int max_p;
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  /* Dispatch on the class of tree node so each expression shape that can
     occur in a size computation is handled.  */
  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants contain no discriminant references;
	 return them unchanged.  */
      return exp;

    case 'x':
      /* For a TREE_LIST, rebuild the list, processing each value and the
	 rest of the chain recursively.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we abort since it is something we can't
	 handle.  */
      if (! contains_placeholder_p (exp))
	gigi_abort (406);

      /* NOTE(review): taking the type of operand 1 looks like it assumes
	 EXP is a COMPONENT_REF whose second operand is the discriminant
	 FIELD_DECL -- confirm.  The extreme of that type's range replaces
	 the whole reference.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison yields a 0/1 value: 1 for the maximum, 0 for the
	 minimum.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      /* Unary, binary, and other expressions: dispatch on arity.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  /* Strip NON_LVALUE_EXPR; otherwise rebuild the unary operation.
	     Negation flips which extreme of the operand we want.  */
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  /* RTL_EXPRs must not appear in size expressions; a COMPOUND_EXPR's
	     value is its second operand; a WITH_RECORD_EXPR is returned
	     as-is for the caller to handle.  */
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);
	  else if (code == WITH_RECORD_EXPR)
	    return exp;

	  {
	    /* Subtraction flips which extreme of the right operand we
	       want.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable. */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && (TREE_OVERFLOW (lhs)
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  /* A SAVE_EXPR is evaluated only once, so leave it alone.  A
	     COND_EXPR becomes the MAX of its two arms.  A CALL_EXPR is
	     rebuilt with its argument list processed.  */
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }

  /* Anything not handled above is unexpected in a size expression.  */
  gigi_abort (408);
}
2135 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (template_type, array_type, expr)
     tree template_type;
     tree array_type;
     tree expr;
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Look through padding records and left-justified modular wrappers to
     reach the actual array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* Prefer the recorded actual bounds, when the type carries them.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.   Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     The template has two fields per dimension, so FIELD advances by two
     each iteration while one bound is consumed from either BOUND_LIST or
     the chain of nested array types.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range. */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      /* NOTE(review): MIN is converted to the second field's type and MAX
	 to the first field's, yet below MIN is paired with FIELD and MAX
	 with its chain.  Harmless if both fields of a pair share a type,
	 but the conversions look swapped -- confirm.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 surround them with a WITH_RECORD_EXPR giving EXPR as the
	 OBJECT. */
      if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
	min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
      if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
	max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);

      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  /* The element list was built in reverse, so flip it before making the
     constructor.  */
  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2204 \f
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.  */

tree
build_vms_descriptor (type, mech, gnat_entity)
     tree type;
     Mechanism_Type mech;
     Entity_Id gnat_entity;
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  /* One index type per dimension; NDIM is small, so stack allocation
     suffices here.  */
  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Fortran-convention arrays store their dimensions in reverse order,
     unless a non-contiguous (NCA) descriptor was requested.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TYPE_DOMAIN (inner_type) == 0 ? inner_type : TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  The numeric codes are presumably the VMS
     DSC$K_DTYPE_* values -- confirm against the VMS descriptor spec.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TREE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TREE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TREE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TREE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TREE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* The POINTER field's initializer takes the address of a
     PLACEHOLDER_EXPR, to be substituted with the actual object when a
     constructor for the descriptor is built.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  /* The remaining fields depend on the descriptor class.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  Each is the running
	 product of the lengths of the dimensions seen so far.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds, one L/U pair per dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2497
2498 /* Utility routine for above code to make a field. */
2499
2500 static tree
2501 make_descriptor_field (name, type, rec_type, initial)
2502 const char *name;
2503 tree type;
2504 tree rec_type;
2505 tree initial;
2506 {
2507 tree field
2508 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2509
2510 DECL_INITIAL (field) = initial;
2511 return field;
2512 }
2513 \f
2514 /* Build a type to be used to represent an aliased object whose nominal
2515 type is an unconstrained array. This consists of a RECORD_TYPE containing
2516 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2517 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2518 is used to represent an arbitrary unconstrained object. Use NAME
2519 as the name of the record. */
2520
2521 tree
2522 build_unc_object_type (template_type, object_type, name)
2523 tree template_type;
2524 tree object_type;
2525 tree name;
2526 {
2527 tree type = make_node (RECORD_TYPE);
2528 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2529 template_type, type, 0, 0, 0, 1);
2530 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2531 type, 0, 0, 0, 1);
2532
2533 TYPE_NAME (type) = name;
2534 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2535 finish_record_type (type,
2536 chainon (chainon (NULL_TREE, template_field),
2537 array_field),
2538 0, 0);
2539
2540 return type;
2541 }
2542 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (old_type, new_type)
     tree old_type;
     tree new_type;
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  Otherwise, get the new type with
     the same qualifiers as the old type and see if it is the same as the old
     type.  */
  if (ptr == 0 && ref == 0)
    return;

  new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
  if (old_type == new_type)
    return;

  /* First handle the simple case: retarget the pointer and reference types
     and re-emit any debug info for their TYPE_DECLs.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ptr != 0)
	TREE_TYPE (ptr) = new_type;
      TYPE_POINTER_TO (new_type) = ptr;

      if (ref != 0)
	TREE_TYPE (ref) = new_type;
      TYPE_REFERENCE_TO (new_type) = ref;

      if (ptr != 0 && TYPE_NAME (ptr) != 0
	  && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				  global_bindings_p (), 0);
      if (ref != 0 && TYPE_NAME (ref) != 0
	  && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				  global_bindings_p (), 0);
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context.

     NOTE(review): this branch dereferences PTR unconditionally, so it
     appears to assume a pointer type always exists here -- confirm.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Transplant the field list of the new fat pointer into the old
	 one and reparent the fields.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point every variant of the fat pointer back at the new
	 unconstrained array type.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record's size is the sum of its two fields' sizes.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2660 \f
/* Convert a pointer to a constrained array into a fat pointer of type TYPE.
   This involves making or finding a template for the array bounds.  EXPR
   is the pointer being converted.  */

static tree
convert_to_fat_pointer (type, expr)
     tree type;
     tree expr;
{
  /* The template type is found through the second field of the fat
     pointer, which points to it.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record. */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Save EXPR since it is dereferenced twice below; strip a direct
	 ADDR_EXPR rather than building an INDIRECT_REF around it.  */
      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The record's first field is the template, the second the data.  */
      template = build_component_ref (expr, NULL_TREE, fields);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields)));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.  */
  return
    gnat_build_constructor (type,
			    tree_cons (TYPE_FIELDS (type), expr,
				       tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
						  template_addr, NULL_TREE)));
}
2716 \f
2717 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2718 is something that is a fat pointer, so convert to it first if it EXPR
2719 is not already a fat pointer. */
2720
2721 static tree
2722 convert_to_thin_pointer (type, expr)
2723 tree type;
2724 tree expr;
2725 {
2726 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2727 expr
2728 = convert_to_fat_pointer
2729 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2730
2731 /* We get the pointer to the data and use a NOP_EXPR to make it the
2732 proper GCC type. */
2733 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2734 expr = build1 (NOP_EXPR, type, expr);
2735
2736 return expr;
2737 }
2738 \f
2739 /* Create an expression whose value is that of EXPR,
2740 converted to type TYPE. The TREE_TYPE of the value
2741 is always TYPE. This function implements all reasonable
2742 conversions; callers should filter out those that are
2743 not permitted by the language being compiled. */
2744
2745 tree
2746 convert (type, expr)
2747 tree type, expr;
2748 {
2749 enum tree_code code = TREE_CODE (type);
2750 tree etype = TREE_TYPE (expr);
2751 enum tree_code ecode = TREE_CODE (etype);
2752 tree tem;
2753
2754 /* If EXPR is already the right type, we are done. */
2755 if (type == etype)
2756 return expr;
2757
2758 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2759 new one. */
2760 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2761 return build (WITH_RECORD_EXPR, type,
2762 convert (type, TREE_OPERAND (expr, 0)),
2763 TREE_OPERAND (expr, 1));
2764
2765 /* If the input type has padding, remove it by doing a component reference
2766 to the field. If the output type has padding, make a constructor
2767 to build the record. If both input and output have padding and are
2768 of variable size, do this as an unchecked conversion. */
2769 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2770 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2771 && (! TREE_CONSTANT (TYPE_SIZE (type))
2772 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2773 ;
2774 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2775 {
2776 /* If we have just converted to this padded type, just get
2777 the inner expression. */
2778 if (TREE_CODE (expr) == CONSTRUCTOR
2779 && CONSTRUCTOR_ELTS (expr) != 0
2780 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2781 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2782 else
2783 return convert (type, build_component_ref (expr, NULL_TREE,
2784 TYPE_FIELDS (etype)));
2785 }
2786 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2787 {
2788 /* If we previously converted from another type and our type is
2789 of variable size, remove the conversion to avoid the need for
2790 variable-size temporaries. */
2791 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2792 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2793 expr = TREE_OPERAND (expr, 0);
2794
2795 /* If we are just removing the padding from expr, convert the original
2796 object if we have variable size. That will avoid the need
2797 for some variable-size temporaries. */
2798 if (TREE_CODE (expr) == COMPONENT_REF
2799 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2800 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2801 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2802 return convert (type, TREE_OPERAND (expr, 0));
2803
2804 /* If the result type is a padded type with a self-referentially-sized
2805 field and the expression type is a record, do this as an
2806 unchecked converstion. */
2807 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2808 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2809 && TREE_CODE (etype) == RECORD_TYPE)
2810 return unchecked_convert (type, expr);
2811
2812 else
2813 return
2814 gnat_build_constructor (type,
2815 tree_cons (TYPE_FIELDS (type),
2816 convert (TREE_TYPE
2817 (TYPE_FIELDS (type)),
2818 expr),
2819 NULL_TREE));
2820 }
2821
2822 /* If the input is a biased type, adjust first. */
2823 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2824 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2825 fold (build1 (GNAT_NOP_EXPR,
2826 TREE_TYPE (etype), expr)),
2827 TYPE_MIN_VALUE (etype))));
2828
2829 /* If the input is a left-justified modular type, we need to extract
2830 the actual object before converting it to any other type with the
2831 exception of an unconstrained array. */
2832 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2833 && code != UNCONSTRAINED_ARRAY_TYPE)
2834 return convert (type, build_component_ref (expr, NULL_TREE,
2835 TYPE_FIELDS (etype)));
2836
2837 /* If converting a type that does not contain a template into one
2838 that does, convert to the data type and then build the template. */
2839 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2840 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2841 {
2842 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2843
2844 return
2845 gnat_build_constructor
2846 (type,
2847 tree_cons (TYPE_FIELDS (type),
2848 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2849 obj_type, NULL_TREE),
2850 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2851 convert (obj_type, expr), NULL_TREE)));
2852 }
2853
2854 /* There are some special cases of expressions that we process
2855 specially. */
2856 switch (TREE_CODE (expr))
2857 {
2858 case ERROR_MARK:
2859 return expr;
2860
2861 case TRANSFORM_EXPR:
2862 case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
2866 TREE_TYPE (expr) = type;
2867 return expr;
2868
2869 case STRING_CST:
2870 case CONSTRUCTOR:
2871 /* If we are converting a STRING_CST to another constrained array type,
2872 just make a new one in the proper type. Likewise for a
2873 CONSTRUCTOR. */
2874 if (code == ecode && AGGREGATE_TYPE_P (etype)
2875 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2876 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2877 {
2878 expr = copy_node (expr);
2879 TREE_TYPE (expr) = type;
2880 return expr;
2881 }
2882 break;
2883
2884 case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoids unneeded conversions which make reference computations
	 more complex.  */
2889 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2890 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2891 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2892 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2893 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2894 TREE_OPERAND (expr, 1));
2895
2896 break;
2897
2898 case UNCONSTRAINED_ARRAY_REF:
2899 /* Convert this to the type of the inner array by getting the address of
2900 the array from the template. */
2901 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2902 build_component_ref (TREE_OPERAND (expr, 0),
2903 get_identifier ("P_ARRAY"),
2904 NULL_TREE));
2905 etype = TREE_TYPE (expr);
2906 ecode = TREE_CODE (etype);
2907 break;
2908
2909 case VIEW_CONVERT_EXPR:
2910 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2911 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2912 return convert (type, TREE_OPERAND (expr, 0));
2913 break;
2914
2915 case INDIRECT_REF:
2916 /* If both types are record types, just convert the pointer and
2917 make a new INDIRECT_REF.
2918
2919 ??? Disable this for now since it causes problems with the
2920 code in build_binary_op for MODIFY_EXPR which wants to
2921 strip off conversions. But that code really is a mess and
2922 we need to do this a much better way some time. */
2923 if (0
2924 && (TREE_CODE (type) == RECORD_TYPE
2925 || TREE_CODE (type) == UNION_TYPE)
2926 && (TREE_CODE (etype) == RECORD_TYPE
2927 || TREE_CODE (etype) == UNION_TYPE)
2928 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2929 return build_unary_op (INDIRECT_REF, NULL_TREE,
2930 convert (build_pointer_type (type),
2931 TREE_OPERAND (expr, 0)));
2932 break;
2933
2934 default:
2935 break;
2936 }
2937
2938 /* Check for converting to a pointer to an unconstrained array. */
2939 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2940 return convert_to_fat_pointer (type, expr);
2941
2942 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2943 || (code == INTEGER_CST && ecode == INTEGER_CST
2944 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2945 return fold (build1 (NOP_EXPR, type, expr));
2946
2947 switch (code)
2948 {
2949 case VOID_TYPE:
2950 return build1 (CONVERT_EXPR, type, expr);
2951
2952 case INTEGER_TYPE:
2953 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2954 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2955 return unchecked_convert (type, expr);
2956 else if (TYPE_BIASED_REPRESENTATION_P (type))
2957 return fold (build1 (CONVERT_EXPR, type,
2958 fold (build (MINUS_EXPR, TREE_TYPE (type),
2959 convert (TREE_TYPE (type), expr),
2960 TYPE_MIN_VALUE (type)))));
2961
2962 /* ... fall through ... */
2963
2964 case ENUMERAL_TYPE:
2965 return fold (convert_to_integer (type, expr));
2966
2967 case POINTER_TYPE:
2968 case REFERENCE_TYPE:
2969 /* If converting between two pointers to records denoting
2970 both a template and type, adjust if needed to account
2971 for any differing offsets, since one might be negative. */
2972 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
2973 {
2974 tree bit_diff
2975 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
2976 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
2977 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
2978 sbitsize_int (BITS_PER_UNIT));
2979
2980 expr = build1 (NOP_EXPR, type, expr);
2981 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
2982 if (integer_zerop (byte_diff))
2983 return expr;
2984
2985 return build_binary_op (PLUS_EXPR, type, expr,
2986 fold (convert_to_pointer (type, byte_diff)));
2987 }
2988
2989 /* If converting to a thin pointer, handle specially. */
2990 if (TYPE_THIN_POINTER_P (type)
2991 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
2992 return convert_to_thin_pointer (type, expr);
2993
2994 /* If converting fat pointer to normal pointer, get the pointer to the
2995 array and then convert it. */
2996 else if (TYPE_FAT_POINTER_P (etype))
2997 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
2998 NULL_TREE);
2999
3000 return fold (convert_to_pointer (type, expr));
3001
3002 case REAL_TYPE:
3003 return fold (convert_to_real (type, expr));
3004
3005 case RECORD_TYPE:
3006 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3007 return
3008 gnat_build_constructor
3009 (type, tree_cons (TYPE_FIELDS (type),
3010 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3011 NULL_TREE));
3012
3013 /* ... fall through ... */
3014
3015 case ARRAY_TYPE:
3016 /* In these cases, assume the front-end has validated the conversion.
3017 If the conversion is valid, it will be a bit-wise conversion, so
3018 it can be viewed as an unchecked conversion. */
3019 return unchecked_convert (type, expr);
3020
3021 case UNION_TYPE:
3022 /* Just validate that the type is indeed that of a field
3023 of the type. Then make the simple conversion. */
3024 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3025 if (TREE_TYPE (tem) == etype)
3026 return build1 (CONVERT_EXPR, type, expr);
3027
3028 gigi_abort (413);
3029
3030 case UNCONSTRAINED_ARRAY_TYPE:
3031 /* If EXPR is a constrained array, take its address, convert it to a
3032 fat pointer, and then dereference it. Likewise if EXPR is a
3033 record containing both a template and a constrained array.
3034 Note that a record representing a left justified modular type
3035 always represents a packed constrained array. */
3036 if (ecode == ARRAY_TYPE
3037 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3038 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3039 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3040 return
3041 build_unary_op
3042 (INDIRECT_REF, NULL_TREE,
3043 convert_to_fat_pointer (TREE_TYPE (type),
3044 build_unary_op (ADDR_EXPR,
3045 NULL_TREE, expr)));
3046
3047 /* Do something very similar for converting one unconstrained
3048 array to another. */
3049 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3050 return
3051 build_unary_op (INDIRECT_REF, NULL_TREE,
3052 convert (TREE_TYPE (type),
3053 build_unary_op (ADDR_EXPR,
3054 NULL_TREE, expr)));
3055 else
3056 gigi_abort (409);
3057
3058 case COMPLEX_TYPE:
3059 return fold (convert_to_complex (type, expr));
3060
3061 default:
3062 gigi_abort (410);
3063 }
3064 }
3065 \f
3066 /* Remove all conversions that are done in EXP. This includes converting
3067 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3068 is nonzero, always return the address of the containing object even if
3069 the address is not bit-aligned. */
3070
3071 tree
3072 remove_conversions (exp, true_address)
3073 tree exp;
3074 int true_address;
3075 {
3076 switch (TREE_CODE (exp))
3077 {
3078 case CONSTRUCTOR:
3079 if (true_address
3080 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3081 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3082 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3083 break;
3084
3085 case COMPONENT_REF:
3086 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3087 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3088 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3089 break;
3090
3091 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3092 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3093 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3094
3095 default:
3096 break;
3097 }
3098
3099 return exp;
3100 }
3101 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.
   Otherwise return EXP unchanged.  */

tree
maybe_unconstrained_array (exp)
     tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Fetch the pointer to the array out of the fat pointer's
	     P_ARRAY field and dereference it; the result is as
	     read-only as the original reference.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      /* For a NULL_EXPR, rebuild it with the type of the inner array,
	 i.e. the type designated by the second field of the fat
	 pointer record.  */
      else if (code == NULL_EXPR)
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* For a WITH_RECORD_EXPR, recurse on the inner expression and, if
	 anything changed, rewrap the result in a new WITH_RECORD_EXPR.  */
      else if (code == WITH_RECORD_EXPR
	       && (TREE_OPERAND (exp, 0)
		   != (new = maybe_unconstrained_array
		       (TREE_OPERAND (exp, 0)))))
	return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
		      TREE_OPERAND (exp, 1));

      /* NOTE(review): no break here, so an unhandled unconstrained-array
	 case falls through to the RECORD_TYPE test below — presumably
	 harmless because TYPE_CONTAINS_TEMPLATE_P should then be false
	 and we reach the final return, but confirm the fall-through is
	 intentional.  */

    case RECORD_TYPE:
      /* If the record holds a template, the data lives in the field that
	 follows it; extract that field, stripping any padding record
	 wrapped around it.  */
      if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	{
	  new
	    = build_component_ref (exp, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_IS_PADDING_P (TREE_TYPE (new)))
	    new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);

	  return new;
	}
      break;

    default:
      break;
    }

  return exp;
}
3160 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.  */

tree
unchecked_convert (type, expr)
     tree type;
     tree expr;
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  unchecked_convert (type, TREE_OPERAND (expr, 0)),
		  TREE_OPERAND (expr, 1));

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  VAX
     floating-point types are excluded because their INTEGER_TYPE
     representation does not behave like a normal integer.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* If the input is biased, first view it as an unbiased copy of
	 its type so the normal conversion machinery sees plain values.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      /* Likewise convert to an unbiased copy of a biased target type;
	 the bias is reapplied by the GNAT_NOP_EXPR below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr);
      expr = build_component_ref (expr, NULL_TREE, field);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size: wrap it in a single-field record first.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      /* General case: strip any unconstrained-array wrapper, then view
	 the bits as the target type.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
    {
      /* Extend by shifting the value left to the top of the machine
	 word and then arithmetically (or logically, for an unsigned
	 type) shifting it back down.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TREE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3325
3326 #include "gt-ada-utils.h"
3327 #include "gtype-ada.h"