Fix i960-rtems Ada build failure reported by Joel Sherrill.
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
11 * *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
22 * *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
25 * *
26 ****************************************************************************/
27
28 #include "config.h"
29 #include "system.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "defaults.h"
33 #include "toplev.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "convert.h"
37
38 #include "ada.h"
39 #include "types.h"
40 #include "atree.h"
41 #include "elists.h"
42 #include "namet.h"
43 #include "nlists.h"
44 #include "stringt.h"
45 #include "uintp.h"
46 #include "fe.h"
47 #include "sinfo.h"
48 #include "einfo.h"
49 #include "ada-tree.h"
50 #include "gigi.h"
51
/* Fallback definitions for target macros that a back end may not provide.  */

#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various standard types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  Indexed by GNAT node id minus
   First_Node_Id; allocated in init_gnat_to_gnu.  */
static tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  Each entry saves the elaboration list of
   an enclosing context.  */

static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

	the global one
	one for each subprogram definition
	one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;
  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;
  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;
  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};

/* The binding level currently in effect.  */
static struct binding_level *current_binding_level = NULL;

/* A chain of binding_level structures awaiting reuse.  */
static struct binding_level *free_binding_level = NULL;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};

/* Forward declarations for file-local subprograms.  */
static tree merge_sizes			PARAMS ((tree, tree, tree, int, int));
static tree compute_related_constant	PARAMS ((tree, tree));
static tree split_plus			PARAMS ((tree, tree *));
static int value_zerop			PARAMS ((tree));
static tree float_type_for_size		PARAMS ((int, enum machine_mode));
static tree convert_to_fat_pointer	PARAMS ((tree, tree));
static tree convert_to_thin_pointer	PARAMS ((tree, tree));
static tree make_descriptor_field	PARAMS ((const char *, tree, tree,
						 tree));
static void mark_binding_level		PARAMS ((PTR));
static void mark_e_stack		PARAMS ((PTR));
147 \f
148 /* Initialize the association of GNAT nodes to GCC trees. */
149
150 void
151 init_gnat_to_gnu ()
152 {
153 Node_Id gnat_node;
154
155 associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
156 ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);
157
158 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
159 associate_gnat_to_gnu[gnat_node] = NULL_TREE;
160
161 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
162 ggc_add_tree_root (&pending_elaborations, 1);
163 ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
164 ggc_add_tree_root (&signed_and_unsigned_types[0][0],
165 (sizeof signed_and_unsigned_types
166 / sizeof signed_and_unsigned_types[0][0]));
167 ggc_add_tree_root (float_types, ARRAY_SIZE (float_types));
168
169 ggc_add_root (&current_binding_level, 1, sizeof current_binding_level,
170 mark_binding_level);
171 }
172
173 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
174 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
175 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
176
177 If GNU_DECL is zero, a previous association is to be reset. */
178
179 void
180 save_gnu_tree (gnat_entity, gnu_decl, no_check)
181 Entity_Id gnat_entity;
182 tree gnu_decl;
183 int no_check;
184 {
185 if (gnu_decl
186 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
187 || (! no_check && ! DECL_P (gnu_decl))))
188 gigi_abort (401);
189
190 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
191 }
192
193 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
194 Return the ..._DECL node that was associated with it. If there is no tree
195 node associated with GNAT_ENTITY, abort.
196
197 In some cases, such as delayed elaboration or expressions that need to
198 be elaborated only once, GNAT_ENTITY is really not an entity. */
199
200 tree
201 get_gnu_tree (gnat_entity)
202 Entity_Id gnat_entity;
203 {
204 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
205 gigi_abort (402);
206
207 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
208 }
209
210 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
211
212 int
213 present_gnu_tree (gnat_entity)
214 Entity_Id gnat_entity;
215 {
216 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
217 }
218
219 \f
220 /* Return non-zero if we are currently in the global binding level. */
221
222 int
223 global_bindings_p ()
224 {
225 return (force_global != 0 || current_binding_level == global_binding_level
226 ? -1 : 0);
227 }
228
/* Return the list of declarations in the current level.  Note that this list
   is in reverse order (it has to be so for back-end compatibility).  */

tree
getdecls ()
{
  return current_binding_level->names;
}
237
/* Nonzero if the current level needs to have a BLOCK made.  A BLOCK is
   needed only if something was declared in this level.  */

int
kept_level_p ()
{
  return (current_binding_level->names != 0);
}
245
246 /* Enter a new binding level. The input parameter is ignored, but has to be
247 specified for back-end compatibility. */
248
249 void
250 pushlevel (ignore)
251 int ignore ATTRIBUTE_UNUSED;
252 {
253 struct binding_level *newlevel = NULL;
254
255 /* Reuse a struct for this binding level, if there is one. */
256 if (free_binding_level)
257 {
258 newlevel = free_binding_level;
259 free_binding_level = free_binding_level->level_chain;
260 }
261 else
262 newlevel
263 = (struct binding_level *) xmalloc (sizeof (struct binding_level));
264
265 *newlevel = clear_binding_level;
266
267 /* Add this level to the front of the chain (stack) of levels that are
268 active. */
269 newlevel->level_chain = current_binding_level;
270 current_binding_level = newlevel;
271 }
272
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so
   create a "block" (a BLOCK node) for the level
   to record its declarations and subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.  */

tree
poplevel (keep, reverse, functionbody)
     int keep;
     int reverse;
     int functionbody;
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed
     for the binding level that we are about to exit and which is returned
     by this routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL node, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
	&& ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
	&& DECL_INITIAL (decl_node) != 0)
      {
	push_function_context ();
	output_inline_function (decl_node);
	pop_function_context ();
      }

  /* Reuse a preallocated BLOCK if one exists; otherwise make one only if
     the caller asked for it (KEEP) or this is a function body.  */
  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock its enclosing scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
	 don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
	{
	  if (TREE_USED (subblock_node))
	    TREE_USED (DECL_NAME (subblock_node)) = 1;
	  if (TREE_ADDRESSABLE (subblock_node))
	    TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
	}

  {
    /* Pop the current level, and free the structure for reuse.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
	 in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
	 leave them in the BLOCK because they are found in the FUNCTION_DECL
	 instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      if (!block_previously_created)
	current_binding_level->blocks
	  = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
392 \f
393 /* Insert BLOCK at the end of the list of subblocks of the
394 current binding level. This is used when a BIND_EXPR is expanded,
395 to handle the BLOCK node inside the BIND_EXPR. */
396
397 void
398 insert_block (block)
399 tree block;
400 {
401 TREE_USED (block) = 1;
402 current_binding_level->blocks
403 = chainon (current_binding_level->blocks, block);
404 }
405
406 /* Set the BLOCK node for the innermost scope
407 (the one we are currently in). */
408
409 void
410 set_block (block)
411 tree block;
412 {
413 current_binding_level->this_block = block;
414 current_binding_level->names = chainon (current_binding_level->names,
415 BLOCK_VARS (block));
416 current_binding_level->blocks = chainon (current_binding_level->blocks,
417 BLOCK_SUBBLOCKS (block));
418 }
419
420 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
421 Returns the ..._DECL node. */
422
423 tree
424 pushdecl (decl)
425 tree decl;
426 {
427 struct binding_level *b;
428
429 /* If at top level, there is no context. But PARM_DECLs always go in the
430 level of its function. */
431 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
432 {
433 b = global_binding_level;
434 DECL_CONTEXT (decl) = 0;
435 }
436 else
437 {
438 b = current_binding_level;
439 DECL_CONTEXT (decl) = current_function_decl;
440 }
441
442 /* Put the declaration on the list. The list of declarations is in reverse
443 order. The list will be reversed later if necessary. This needs to be
444 this way for compatibility with the back-end.
445
446 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
447 will cause trouble with the debugger and aren't needed anyway. */
448 if (TREE_CODE (decl) != TYPE_DECL
449 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
450 {
451 TREE_CHAIN (decl) = b->names;
452 b->names = decl;
453 }
454
455 /* For the declaration of a type, set its name if it either is not already
456 set, was set to an IDENTIFIER_NODE, indicating an internal name,
457 or if the previous type name was not derived from a source name.
458 We'd rather have the type named with a real name and all the pointer
459 types to the same object have the same POINTER_TYPE node. Code in this
460 function in c-decl.c makes a copy of the type node here, but that may
461 cause us trouble with incomplete types, so let's not try it (at least
462 for now). */
463
464 if (TREE_CODE (decl) == TYPE_DECL
465 && DECL_NAME (decl) != 0
466 && (TYPE_NAME (TREE_TYPE (decl)) == 0
467 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
468 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
469 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
470 && ! DECL_ARTIFICIAL (decl))))
471 TYPE_NAME (TREE_TYPE (decl)) = decl;
472
473 return decl;
474 }
475 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  The statement order below matters: sizetype
   must be set before the common tree nodes are finished.  */

void
gnat_init_decl_processing ()
{
  lineno = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of ptr_mode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}
511
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.
   LONG_LONG_FLOAT_TYPE and EXCEPTION_TYPE are front-end-provided types
   used to derive the longest float type and the exception type.  */

void
init_gigi_decls (long_long_float_type, exception_type)
     tree long_long_float_type, exception_type;
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
			    longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
			unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
			    void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */

  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.
     The jump buffer is modeled as an array of 6 Pmode words.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       NULL_TREE,
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0];
	   i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
      {
	/* NOTE(review): 17 bytes holds "__gnat_rcheck_" (14) plus two digits
	   and the NUL; this assumes fewer than 100 reason codes — confirm
	   against LAST_REASON_CODE.  Also, I is unsigned but printed with
	   %.2d; harmless for the small values used here.  */
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");

  /* Register the decl arrays as garbage collector roots.  */
  ggc_add_tree_root (gnat_std_decls, ARRAY_SIZE (gnat_std_decls));
  ggc_add_tree_root (gnat_raise_decls, ARRAY_SIZE (gnat_raise_decls));
}
687 \f
/* This function is called indirectly from toplev.c to handle incomplete
   declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero.  To be precise,
   compile_file in toplev.c makes an indirect call through the function pointer
   incomplete_decl_finalize_hook which is initialized to this routine in
   init_decl_processing.  Gigi never produces such decls, so reaching this
   hook is an internal error.  */

void
gnat_finish_incomplete_decl (dont_care)
     tree dont_care ATTRIBUTE_UNUSED;
{
  gigi_abort (405);
}
700 \f
701 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
702 nodes (FIELDLIST), finish constructing the record or union type.
703 If HAS_REP is nonzero, this record has a rep clause; don't call
704 layout_type but merely set the size and alignment ourselves.
705 If DEFER_DEBUG is nonzero, do not call the debugging routines
706 on this type; it will be done later. */
707
708 void
709 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
710 tree record_type;
711 tree fieldlist;
712 int has_rep;
713 int defer_debug;
714 {
715 enum tree_code code = TREE_CODE (record_type);
716 tree ada_size = bitsize_zero_node;
717 tree size = bitsize_zero_node;
718 tree size_unit = size_zero_node;
719 int var_size = 0;
720 tree field;
721
722 TYPE_FIELDS (record_type) = fieldlist;
723
724 if (TYPE_NAME (record_type) != 0
725 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
726 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
727 else
728 TYPE_STUB_DECL (record_type)
729 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
730 record_type));
731
732 /* We don't need both the typedef name and the record name output in
733 the debugging information, since they are the same. */
734 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
735
736 /* Globally initialize the record first. If this is a rep'ed record,
737 that just means some initializations; otherwise, layout the record. */
738
739 if (has_rep)
740 {
741 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
742 TYPE_MODE (record_type) = BLKmode;
743 if (TYPE_SIZE (record_type) == 0)
744 {
745 TYPE_SIZE (record_type) = bitsize_zero_node;
746 TYPE_SIZE_UNIT (record_type) = size_zero_node;
747 }
748 }
749 else
750 {
751 /* Ensure there isn't a size already set. There can be in an error
752 case where there is a rep clause but all fields have errors and
753 no longer have a position. */
754 TYPE_SIZE (record_type) = 0;
755 layout_type (record_type);
756 }
757
758 /* At this point, the position and size of each field is known. It was
759 either set before entry by a rep clause, or by laying out the type
760 above. We now make a pass through the fields (in reverse order for
761 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
762 (for rep'ed records that are not padding types); and the mode (for
763 rep'ed records). */
764
765 if (code == QUAL_UNION_TYPE)
766 fieldlist = nreverse (fieldlist);
767
768 for (field = fieldlist; field; field = TREE_CHAIN (field))
769 {
770 tree type = TREE_TYPE (field);
771 tree this_size = DECL_SIZE (field);
772 tree this_size_unit = DECL_SIZE_UNIT (field);
773 tree this_ada_size = DECL_SIZE (field);
774
775 /* We need to make an XVE/XVU record if any field has variable size,
776 whether or not the record does. For example, if we have an union,
777 it may be that all fields, rounded up to the alignment, have the
778 same size, in which case we'll use that size. But the debug
779 output routines (except Dwarf2) won't be able to output the fields,
780 so we need to make the special record. */
781 if (TREE_CODE (this_size) != INTEGER_CST)
782 var_size = 1;
783
784 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
785 || TREE_CODE (type) == QUAL_UNION_TYPE)
786 && ! TYPE_IS_FAT_POINTER_P (type)
787 && ! TYPE_CONTAINS_TEMPLATE_P (type)
788 && TYPE_ADA_SIZE (type) != 0)
789 this_ada_size = TYPE_ADA_SIZE (type);
790
791 if (has_rep && ! DECL_BIT_FIELD (field))
792 TYPE_ALIGN (record_type)
793 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
794
795 switch (code)
796 {
797 case UNION_TYPE:
798 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
799 size = size_binop (MAX_EXPR, size, this_size);
800 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
801 break;
802
803 case QUAL_UNION_TYPE:
804 ada_size
805 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
806 this_ada_size, ada_size));
807 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
808 this_size, size));
809 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
810 this_size_unit, size_unit));
811 break;
812
813 case RECORD_TYPE:
814 /* Since we know here that all fields are sorted in order of
815 increasing bit position, the size of the record is one
816 higher than the ending bit of the last field processed
817 unless we have a rep clause, since in that case we might
818 have a field outside a QUAL_UNION_TYPE that has a higher ending
819 position. So use a MAX in that case. Also, if this field is a
820 QUAL_UNION_TYPE, we need to take into account the previous size in
821 the case of empty variants. */
822 ada_size
823 = merge_sizes (ada_size, bit_position (field), this_ada_size,
824 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
825 size = merge_sizes (size, bit_position (field), this_size,
826 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
827 size_unit
828 = merge_sizes (size_unit, byte_position (field), this_size_unit,
829 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
830 break;
831
832 default:
833 abort ();
834 }
835 }
836
837 if (code == QUAL_UNION_TYPE)
838 nreverse (fieldlist);
839
840 /* If this is a padding record, we never want to make the size smaller than
841 what was specified in it, if any. */
842 if (TREE_CODE (record_type) == RECORD_TYPE
843 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
844 {
845 size = TYPE_SIZE (record_type);
846 size_unit = TYPE_SIZE_UNIT (record_type);
847 }
848
849 /* Now set any of the values we've just computed that apply. */
850 if (! TYPE_IS_FAT_POINTER_P (record_type)
851 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
852 TYPE_ADA_SIZE (record_type) = ada_size;
853
854 #ifdef ROUND_TYPE_SIZE
855 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
856 size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
857 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
858 #else
859 size = round_up (size, TYPE_ALIGN (record_type));
860 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
861 #endif
862
863 if (has_rep
864 && ! (TREE_CODE (record_type) == RECORD_TYPE
865 && TYPE_IS_PADDING_P (record_type)
866 && TREE_CODE (size) != INTEGER_CST
867 && contains_placeholder_p (size)))
868 {
869 TYPE_SIZE (record_type) = size;
870 TYPE_SIZE_UNIT (record_type) = size_unit;
871 }
872
873 if (has_rep)
874 compute_record_mode (record_type);
875
876 if (! defer_debug)
877 {
878 /* If this record is of variable size, rename it so that the
879 debugger knows it is and make a new, parallel, record
880 that tells the debugger how the record is laid out. See
881 exp_dbug.ads. */
882 if (var_size)
883 {
884 tree new_record_type
885 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
886 ? UNION_TYPE : TREE_CODE (record_type));
887 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
888 tree new_id
889 = concat_id_with_name (orig_id,
890 TREE_CODE (record_type) == QUAL_UNION_TYPE
891 ? "XVU" : "XVE");
892 tree last_pos = bitsize_zero_node;
893 tree old_field;
894
895 TYPE_NAME (new_record_type) = new_id;
896 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
897 TYPE_STUB_DECL (new_record_type)
898 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
899 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
900 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
901 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
902 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
903
904 /* Now scan all the fields, replacing each field with a new
905 field corresponding to the new encoding. */
906 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
907 old_field = TREE_CHAIN (old_field))
908 {
909 tree field_type = TREE_TYPE (old_field);
910 tree field_name = DECL_NAME (old_field);
911 tree new_field;
912 tree curpos = bit_position (old_field);
913 int var = 0;
914 unsigned int align = 0;
915 tree pos;
916
917 /* See how the position was modified from the last position.
918
919 There are two basic cases we support: a value was added
920 to the last position or the last position was rounded to
921 a boundary and they something was added. Check for the
922 first case first. If not, see if there is any evidence
923 of rounding. If so, round the last position and try
924 again.
925
926 If this is a union, the position can be taken as zero. */
927
928 if (TREE_CODE (new_record_type) == UNION_TYPE)
929 pos = bitsize_zero_node, align = 0;
930 else
931 pos = compute_related_constant (curpos, last_pos);
932
933 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
934 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
935 {
936 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
937 pos = compute_related_constant (curpos,
938 round_up (last_pos, align));
939 }
940 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
941 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
942 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
943 && host_integerp (TREE_OPERAND
944 (TREE_OPERAND (curpos, 0), 1),
945 1))
946 {
947 align
948 = tree_low_cst
949 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
950 pos = compute_related_constant (curpos,
951 round_up (last_pos, align));
952 }
953
954 /* If we can't compute a position, set it to zero.
955
956 ??? We really should abort here, but it's too much work
957 to get this correct for all cases. */
958
959 if (pos == 0)
960 pos = bitsize_zero_node;
961
962 /* See if this type is variable-size and make a new type
963 and indicate the indirection if so. */
964 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
965 {
966 field_type = build_pointer_type (field_type);
967 var = 1;
968 }
969
970 /* Make a new field name, if necessary. */
971 if (var || align != 0)
972 {
973 char suffix[6];
974
975 if (align != 0)
976 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
977 align / BITS_PER_UNIT);
978 else
979 strcpy (suffix, "XVL");
980
981 field_name = concat_id_with_name (field_name, suffix);
982 }
983
984 new_field = create_field_decl (field_name, field_type,
985 new_record_type, 0,
986 DECL_SIZE (old_field), pos, 0);
987 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
988 TYPE_FIELDS (new_record_type) = new_field;
989
990 /* If old_field is a QUAL_UNION_TYPE, take its size as being
991 zero. The only time it's not the last field of the record
992 is when there are other components at fixed positions after
993 it (meaning there was a rep clause for every field) and we
994 want to be able to encode them. */
995 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
996 (TREE_CODE (TREE_TYPE (old_field))
997 == QUAL_UNION_TYPE)
998 ? bitsize_zero_node
999 : DECL_SIZE (old_field));
1000 }
1001
1002 TYPE_FIELDS (new_record_type)
1003 = nreverse (TYPE_FIELDS (new_record_type));
1004
1005 rest_of_type_compilation (new_record_type, global_bindings_p ());
1006 }
1007
1008 rest_of_type_compilation (record_type, global_bindings_p ());
1009 }
1010 }
1011
1012 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1013 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1014 if this represents a QUAL_UNION_TYPE in which case we must look for
1015 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1016 is nonzero, we must take the MAX of the end position of this field
1017 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1018
1019 We return an expression for the size. */
1020
1021 static tree
1022 merge_sizes (last_size, first_bit, size, special, has_rep)
1023 tree last_size;
1024 tree first_bit, size;
1025 int special;
1026 int has_rep;
1027 {
1028 tree type = TREE_TYPE (last_size);
1029
1030 if (! special || TREE_CODE (size) != COND_EXPR)
1031 {
1032 tree new = size_binop (PLUS_EXPR, first_bit, size);
1033
1034 if (has_rep)
1035 new = size_binop (MAX_EXPR, last_size, new);
1036
1037 return new;
1038 }
1039
1040 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1041 integer_zerop (TREE_OPERAND (size, 1))
1042 ? last_size : merge_sizes (last_size, first_bit,
1043 TREE_OPERAND (size, 1),
1044 1, has_rep),
1045 integer_zerop (TREE_OPERAND (size, 2))
1046 ? last_size : merge_sizes (last_size, first_bit,
1047 TREE_OPERAND (size, 2),
1048 1, has_rep)));
1049 }
1050
1051 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1052 related by the addition of a constant. Return that constant if so. */
1053
1054 static tree
1055 compute_related_constant (op0, op1)
1056 tree op0, op1;
1057 {
1058 tree op0_var, op1_var;
1059 tree op0_con = split_plus (op0, &op0_var);
1060 tree op1_con = split_plus (op1, &op1_var);
1061 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1062
1063 if (operand_equal_p (op0_var, op1_var, 0))
1064 return result;
1065 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1066 return result;
1067 else
1068 return 0;
1069 }
1070
1071 /* Utility function of above to split a tree OP which may be a sum, into a
1072 constant part, which is returned, and a variable part, which is stored
1073 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1074 sizetype. */
1075
1076 static tree
1077 split_plus (in, pvar)
1078 tree in;
1079 tree *pvar;
1080 {
1081 tree result = bitsize_zero_node;
1082
1083 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1084 in = TREE_OPERAND (in, 0);
1085
1086 *pvar = in;
1087 if (TREE_CODE (in) == INTEGER_CST)
1088 {
1089 *pvar = bitsize_zero_node;
1090 return in;
1091 }
1092 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1093 {
1094 tree lhs_var, rhs_var;
1095 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1096 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1097
1098 result = size_binop (PLUS_EXPR, result, lhs_con);
1099 result = size_binop (TREE_CODE (in), result, rhs_con);
1100
1101 if (lhs_var == TREE_OPERAND (in, 0)
1102 && rhs_var == TREE_OPERAND (in, 1))
1103 return bitsize_zero_node;
1104
1105 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1106 return result;
1107 }
1108 else
1109 return bitsize_zero_node;
1110 }
1111 \f
1112 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1113 subprogram. If it is void_type_node, then we are dealing with a procedure,
1114 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1115 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1116 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1117 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1118 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1119 RETURNS_WITH_DSP is nonzero if the function is to return with a
1120 depressed stack pointer. */
1121
1122 tree
1123 create_subprog_type (return_type, param_decl_list, cico_list,
1124 returns_unconstrained, returns_by_ref, returns_with_dsp)
1125 tree return_type;
1126 tree param_decl_list;
1127 tree cico_list;
1128 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1129 {
1130 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1131 the subprogram formal parameters. This list is generated by traversing the
1132 input list of PARM_DECL nodes. */
1133 tree param_type_list = NULL;
1134 tree param_decl;
1135 tree type;
1136
1137 for (param_decl = param_decl_list; param_decl;
1138 param_decl = TREE_CHAIN (param_decl))
1139 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1140 param_type_list);
1141
1142 /* The list of the function parameter types has to be terminated by the void
1143 type to signal to the back-end that we are not dealing with a variable
1144 parameter subprogram, but that the subprogram has a fixed number of
1145 parameters. */
1146 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1147
1148 /* The list of argument types has been created in reverse
1149 so nreverse it. */
1150 param_type_list = nreverse (param_type_list);
1151
1152 type = build_function_type (return_type, param_type_list);
1153
1154 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1155 or the new type should, make a copy of TYPE. Likewise for
1156 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1157 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1158 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1159 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1160 type = copy_type (type);
1161
1162 TYPE_CI_CO_LIST (type) = cico_list;
1163 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1164 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1165 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1166 return type;
1167 }
1168 \f
1169 /* Return a copy of TYPE but safe to modify in any way. */
1170
1171 tree
1172 copy_type (type)
1173 tree type;
1174 {
1175 tree new = copy_node (type);
1176
1177 /* copy_node clears this field instead of copying it, because it is
1178 aliased with TREE_CHAIN. */
1179 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1180
1181 TYPE_POINTER_TO (new) = 0;
1182 TYPE_REFERENCE_TO (new) = 0;
1183 TYPE_MAIN_VARIANT (new) = new;
1184 TYPE_NEXT_VARIANT (new) = 0;
1185
1186 return new;
1187 }
1188 \f
1189 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1190 TYPE_INDEX_TYPE is INDEX. */
1191
1192 tree
1193 create_index_type (min, max, index)
1194 tree min, max;
1195 tree index;
1196 {
1197 /* First build a type for the desired range. */
1198 tree type = build_index_2_type (min, max);
1199
1200 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1201 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1202 is set, but not to INDEX, make a copy of this type with the requested
1203 index type. Note that we have no way of sharing these types, but that's
1204 only a small hole. */
1205 if (TYPE_INDEX_TYPE (type) == index)
1206 return type;
1207 else if (TYPE_INDEX_TYPE (type) != 0)
1208 type = copy_type (type);
1209
1210 TYPE_INDEX_TYPE (type) = index;
1211 return type;
1212 }
1213 \f
1214 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1215 string) and TYPE is a ..._TYPE node giving its data type.
1216 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1217 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1218 information about this type. */
1219
1220 tree
1221 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1222 tree type_name;
1223 tree type;
1224 struct attrib *attr_list;
1225 int artificial_p;
1226 int debug_info_p;
1227 {
1228 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1229 enum tree_code code = TREE_CODE (type);
1230
1231 DECL_ARTIFICIAL (type_decl) = artificial_p;
1232 pushdecl (type_decl);
1233 process_attributes (type_decl, attr_list);
1234
1235 /* Pass type declaration information to the debugger unless this is an
1236 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1237 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1238 a dummy type, which will be completed later, or a type for which
1239 debugging information was not requested. */
1240 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1241 || ! debug_info_p)
1242 DECL_IGNORED_P (type_decl) = 1;
1243 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1244 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1245 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1246 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1247
1248 return type_decl;
1249 }
1250
1251 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1252 ASM_NAME is its assembler name (if provided). TYPE is its data type
1253 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1254 expression; NULL_TREE if none.
1255
1256 CONST_FLAG is nonzero if this variable is constant.
1257
1258 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1259 the current compilation unit. This flag should be set when processing the
1260 variable definitions in a package specification. EXTERN_FLAG is nonzero
1261 when processing an external variable declaration (as opposed to a
1262 definition: no storage is to be allocated for the variable here).
1263
1264 STATIC_FLAG is only relevant when not at top level. In that case
1265 it indicates whether to always allocate storage to the variable. */
1266
tree
create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
		 extern_flag, static_flag, attr_list)
     tree var_name;
     tree asm_name;
     tree type;
     tree var_init;
     int const_flag;
     int public_flag;
     int extern_flag;
     int static_flag;
     struct attrib *attr_list;
{
  /* Nonzero if VAR_INIT is a usable constant: its type must agree with
     TYPE, and at global scope (or for a static) it must additionally pass
     initializer_constant_valid_p; locally TREE_CONSTANT suffices.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small".  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* If nonzero, an initialization to be emitted as an explicit assignment
     statement instead of through DECL_INITIAL.  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* A non-constant global initialization is deferred to the elaboration
     routine.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  /* A volatile object is also treated as having side effects so every
     access is kept.  */
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external. If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  /* A nonzero DECL_CONTEXT means we are inside a function, where the
     stored DECL_INITIAL must be expanded now.  */
  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1374 \f
1375 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1376 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1377 this field is in a record type with a "pragma pack". If SIZE is nonzero
1378 it is the specified size for this field. If POS is nonzero, it is the bit
1379 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1380 the address of this field for aliasing purposes. */
1381
tree
create_field_decl (field_name, field_type, record_type, packed, size, pos,
		   addressable)
     tree field_name;
     tree field_type;
     tree record_type;
     int packed;
     tree size, pos;
     int addressable;
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     If it is a padding type where the inner field is of variable size, it
     must be at its natural alignment. Just handle the packed case here; we
     will disallow non-aligned rep clauses elsewhere.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl)
      = ((TREE_CODE (field_type) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (field_type)
	  && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
	 ?  TYPE_ALIGN (field_type) : BITS_PER_UNIT);

  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed)
    {
      /* PACKED == 1 (pragma Pack) means use the tighter RM size only when
	 it actually differs from the type size.  */
      if (packed == 1 && ! operand_equal_p (rm_size (field_type),
					    TYPE_SIZE (field_type), 0))
	size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (size != 0 && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* With neither packing nor a rep clause the field may still get
	 the best alignment the record allows.  */
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  /* A field placed by a rep clause counts as packed exactly when it ended
     up a bitfield; otherwise it inherits the record's packedness.  */
  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      /* Split POS into the byte offset and the bit offset within that
	 byte that GCC's field layout machinery expects.  */
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);

  return field_decl;
}
1498
1499 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1500 effects, has the value of zero. */
1501
1502 static int
1503 value_zerop (exp)
1504 tree exp;
1505 {
1506 if (TREE_CODE (exp) == COMPOUND_EXPR)
1507 return value_zerop (TREE_OPERAND (exp, 1));
1508
1509 return integer_zerop (exp);
1510 }
1511 \f
1512 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1513 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1514 readonly (either an IN parameter or an address of a pass-by-ref
1515 parameter). */
1516
1517 tree
1518 create_param_decl (param_name, param_type, readonly)
1519 tree param_name;
1520 tree param_type;
1521 int readonly;
1522 {
1523 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1524
1525 DECL_ARG_TYPE (param_decl) = param_type;
1526 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1527 TREE_READONLY (param_decl) = readonly;
1528 return param_decl;
1529 }
1530 \f
1531 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1532
void
process_attributes (decl, attr_list)
     tree decl;
     struct attrib *attr_list;
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand a single (name . arg) attribute entry to the
	   target-independent attribute machinery.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias needs a definition in this unit, hence TREE_STATIC.  */
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	/* Named sections are only available when the assembler supports
	   them; placing the object in a section also makes it ineligible
	   for common allocation.  */
#ifdef ASM_OUTPUT_SECTION_NAME
	DECL_SECTION_NAME (decl)
	  = build_string (IDENTIFIER_LENGTH (attr_list->name),
			  IDENTIFIER_POINTER (attr_list->name));
	DECL_COMMON (decl) = 0;
#else
	post_error ("?section attributes are not supported for this target",
		    attr_list->error_point);
#endif
	break;
      }
}
1573 \f
1574 /* Add some pending elaborations on the list. */
1575
1576 void
1577 add_pending_elaborations (var_decl, var_init)
1578 tree var_decl;
1579 tree var_init;
1580 {
1581 if (var_init != 0)
1582 Check_Elaboration_Code_Allowed (error_gnat_node);
1583
1584 pending_elaborations
1585 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1586 }
1587
1588 /* Obtain any pending elaborations and clear the old list. */
1589
1590 tree
1591 get_pending_elaborations ()
1592 {
1593 /* Each thing added to the list went on the end; we want it on the
1594 beginning. */
1595 tree result = TREE_CHAIN (pending_elaborations);
1596
1597 TREE_CHAIN (pending_elaborations) = 0;
1598 return result;
1599 }
1600
1601 /* Mark the binding level stack. */
1602
1603 static void
1604 mark_binding_level (arg)
1605 PTR arg;
1606 {
1607 struct binding_level *level = *(struct binding_level **) arg;
1608
1609 for (; level != 0; level = level->level_chain)
1610 {
1611 ggc_mark_tree (level->names);
1612 ggc_mark_tree (level->blocks);
1613 ggc_mark_tree (level->this_block);
1614 }
1615 }
1616
1617 /* Mark the pending elaboration list. */
1618
1619 static void
1620 mark_e_stack (data)
1621 PTR data;
1622 {
1623 struct e_stack *p = *((struct e_stack **) data);
1624
1625 if (p != 0)
1626 {
1627 ggc_mark_tree (p->elab_list);
1628 mark_e_stack (&p->next);
1629 }
1630 }
1631
1632 /* Return nonzero if there are pending elaborations. */
1633
1634 int
1635 pending_elaborations_p ()
1636 {
1637 return TREE_CHAIN (pending_elaborations) != 0;
1638 }
1639
1640 /* Save a copy of the current pending elaboration list and make a new
1641 one. */
1642
1643 void
1644 push_pending_elaborations ()
1645 {
1646 struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));
1647
1648 p->next = elist_stack;
1649 p->elab_list = pending_elaborations;
1650 elist_stack = p;
1651 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1652 }
1653
1654 /* Pop the stack of pending elaborations. */
1655
1656 void
1657 pop_pending_elaborations ()
1658 {
1659 struct e_stack *p = elist_stack;
1660
1661 pending_elaborations = p->elab_list;
1662 elist_stack = p->next;
1663 free (p);
1664 }
1665
1666 /* Return the current position in pending_elaborations so we can insert
1667 elaborations after that point. */
1668
tree
get_elaboration_location ()
{
  /* Entries are appended with chainon, so the last node of the list is
     the current insertion point.  */
  return tree_last (pending_elaborations);
}
1674
1675 /* Insert the current elaborations after ELAB, which is in some elaboration
1676 list. */
1677
1678 void
1679 insert_elaboration_list (elab)
1680 tree elab;
1681 {
1682 tree next = TREE_CHAIN (elab);
1683
1684 if (TREE_CHAIN (pending_elaborations))
1685 {
1686 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1687 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1688 TREE_CHAIN (pending_elaborations) = 0;
1689 }
1690 }
1691
1692 /* Returns a LABEL_DECL node for LABEL_NAME. */
1693
1694 tree
1695 create_label_decl (label_name)
1696 tree label_name;
1697 {
1698 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1699
1700 DECL_CONTEXT (label_decl) = current_function_decl;
1701 DECL_MODE (label_decl) = VOIDmode;
1702 DECL_SOURCE_LINE (label_decl) = lineno;
1703 DECL_SOURCE_FILE (label_decl) = input_filename;
1704
1705 return label_decl;
1706 }
1707 \f
1708 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1709 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1710 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1711 PARM_DECL nodes chained through the TREE_CHAIN field).
1712
1713 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1714 appropriate fields in the FUNCTION_DECL. */
1715
1716 tree
1717 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1718 inline_flag, public_flag, extern_flag, attr_list)
1719 tree subprog_name;
1720 tree asm_name;
1721 tree subprog_type;
1722 tree param_decl_list;
1723 int inline_flag;
1724 int public_flag;
1725 int extern_flag;
1726 struct attrib *attr_list;
1727 {
1728 tree return_type = TREE_TYPE (subprog_type);
1729 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1730
1731 /* If this is a function nested inside an inlined external function, it
1732 means we aren't going to compile the outer function unless it is
1733 actually inlined, so do the same for us. */
1734 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1735 && DECL_EXTERNAL (current_function_decl))
1736 extern_flag = 1;
1737
1738 DECL_EXTERNAL (subprog_decl) = extern_flag;
1739 TREE_PUBLIC (subprog_decl) = public_flag;
1740 DECL_INLINE (subprog_decl) = inline_flag;
1741 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1742 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1743 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1744 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1745 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1746
1747 if (asm_name != 0)
1748 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1749
1750 process_attributes (subprog_decl, attr_list);
1751
1752 /* Add this decl to the current binding level. */
1753 subprog_decl = pushdecl (subprog_decl);
1754
1755 /* Output the assembler code and/or RTL for the declaration. */
1756 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1757
1758 return subprog_decl;
1759 }
1760 \f
1761 /* Count how deep we are into nested functions. This is because
1762 we shouldn't call the backend function context routines unless we
1763 are in a nested function. */
1764
1765 static int function_nesting_depth;
1766
1767 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1768 body. This routine needs to be invoked before processing the declarations
1769 appearing in the subprogram. */
1770
void
begin_subprog_body (subprog_decl)
     tree subprog_decl;
{
  tree param_decl_list;
  tree param_decl;
  tree next_param;

  /* A nested function needs the enclosing function's compilation state
     saved and restored around it.  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative. error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage. This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
     subprogram body) so that they can be recognized as local variables in the
     subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecl'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  /* Unchain each PARM_DECL before pushing it so pushdecl links it into
     the binding level's own chain.  */
  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes. They appear in the right order. */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  /* Initialize RTL generation for the body.  */
  init_function_start (subprog_decl, input_filename, lineno);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
1830
1831 /* Finish the definition of the current subprogram and compile it all the way
1832 to assembler language output. */
1833
void
end_subprog_body ()
{
  tree decl;
  tree cico_list;

  /* Close the binding level opened by begin_subprog_body; the resulting
     BLOCK replaces the error_mark_node placed in DECL_INITIAL there.  */
  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  /* Generate the function epilogue RTL before handing the whole function
     to the back end.  */
  expand_function_end (input_filename, lineno, 0);

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  rest_of_compilation (current_function_decl);

  if (function_nesting_depth > 1)
    ggc_pop_context ();

#if 0
  /* If we're sure this function is defined in this file then mark it
     as such */
  if (TREE_ASM_WRITTEN (current_function_decl))
    mark_fn_defined_in_this_file (current_function_decl);
#endif

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway. */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  /* A zero DECL_SAVED_INSNS means the function was not saved for
     inlining, so its RTL is no longer needed and can be discarded.  */
  if (DECL_SAVED_INSNS (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory. */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value. */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it. */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1905 \f
1906 /* Return a definition for a builtin function named NAME and whose data type
1907 is TYPE. TYPE should be a function type with argument types.
1908 FUNCTION_CODE tells later passes how to compile calls to this function.
1909 See tree.h for its possible values.
1910
1911 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1912 the name to be called if we can't opencode the function. */
1913
1914 tree
1915 builtin_function (name, type, function_code, class, library_name)
1916 const char *name;
1917 tree type;
1918 int function_code;
1919 enum built_in_class class;
1920 const char *library_name;
1921 {
1922 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1923
1924 DECL_EXTERNAL (decl) = 1;
1925 TREE_PUBLIC (decl) = 1;
1926 if (library_name)
1927 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1928
1929 pushdecl (decl);
1930 DECL_BUILT_IN_CLASS (decl) = class;
1931 DECL_FUNCTION_CODE (decl) = function_code;
1932 return decl;
1933 }
1934
1935 /* Return an integer type with the number of bits of precision given by
1936 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1937 it is a signed type. */
1938
1939 tree
1940 gnat_type_for_size (precision, unsignedp)
1941 unsigned precision;
1942 int unsignedp;
1943 {
1944 tree t;
1945 char type_name[20];
1946
1947 if (precision <= 2 * MAX_BITS_PER_WORD
1948 && signed_and_unsigned_types[precision][unsignedp] != 0)
1949 return signed_and_unsigned_types[precision][unsignedp];
1950
1951 if (unsignedp)
1952 t = make_unsigned_type (precision);
1953 else
1954 t = make_signed_type (precision);
1955
1956 if (precision <= 2 * MAX_BITS_PER_WORD)
1957 signed_and_unsigned_types[precision][unsignedp] = t;
1958
1959 if (TYPE_NAME (t) == 0)
1960 {
1961 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1962 TYPE_NAME (t) = get_identifier (type_name);
1963 }
1964
1965 return t;
1966 }
1967
1968 /* Likewise for floating-point types. */
1969
1970 static tree
1971 float_type_for_size (precision, mode)
1972 int precision;
1973 enum machine_mode mode;
1974 {
1975 tree t;
1976 char type_name[20];
1977
1978 if (float_types[(int) mode] != 0)
1979 return float_types[(int) mode];
1980
1981 float_types[(int) mode] = t = make_node (REAL_TYPE);
1982 TYPE_PRECISION (t) = precision;
1983 layout_type (t);
1984
1985 if (TYPE_MODE (t) != mode)
1986 gigi_abort (414);
1987
1988 if (TYPE_NAME (t) == 0)
1989 {
1990 sprintf (type_name, "FLOAT_%d", precision);
1991 TYPE_NAME (t) = get_identifier (type_name);
1992 }
1993
1994 return t;
1995 }
1996
1997 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1998 an unsigned type; otherwise a signed type is returned. */
1999
2000 tree
2001 gnat_type_for_mode (mode, unsignedp)
2002 enum machine_mode mode;
2003 int unsignedp;
2004 {
2005 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2006 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
2007 else
2008 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2009 }
2010
2011 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2012
2013 tree
2014 gnat_unsigned_type (type_node)
2015 tree type_node;
2016 {
2017 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2018
2019 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2020 {
2021 type = copy_node (type);
2022 TREE_TYPE (type) = type_node;
2023 }
2024 else if (TREE_TYPE (type_node) != 0
2025 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2026 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2027 {
2028 type = copy_node (type);
2029 TREE_TYPE (type) = TREE_TYPE (type_node);
2030 }
2031
2032 return type;
2033 }
2034
2035 /* Return the signed version of a TYPE_NODE, a scalar type. */
2036
2037 tree
2038 gnat_signed_type (type_node)
2039 tree type_node;
2040 {
2041 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2042
2043 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2044 {
2045 type = copy_node (type);
2046 TREE_TYPE (type) = type_node;
2047 }
2048 else if (TREE_TYPE (type_node) != 0
2049 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2050 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2051 {
2052 type = copy_node (type);
2053 TREE_TYPE (type) = TREE_TYPE (type_node);
2054 }
2055
2056 return type;
2057 }
2058
2059 /* Return a type the same as TYPE except unsigned or signed according to
2060 UNSIGNEDP. */
2061
2062 tree
2063 gnat_signed_or_unsigned_type (unsignedp, type)
2064 int unsignedp;
2065 tree type;
2066 {
2067 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2068 return type;
2069 else
2070 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2071 }
2072 \f
2073 /* EXP is an expression for the size of an object. If this size contains
2074 discriminant references, replace them with the maximum (if MAX_P) or
2075 minimum (if ! MAX_P) possible value of the discriminant. */
2076
tree
max_size (exp, max_p)
     tree exp;
     int max_p;
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants cannot contain discriminant
	 references; return them unchanged.  */
      return exp;

    case 'x':
      /* A TREE_LIST is processed element-wise; anything else of class
	 'x' falls through to the abort below.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we abort since it is something we can't
	 handle.  */
      if (! contains_placeholder_p (exp))
	gigi_abort (406);

      /* Operand 1 is presumably the discriminant FIELD_DECL; use the
	 extreme value of its type as the replacement.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison is bounded by 0 and 1.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      /* Negation flips which extreme of
					 the operand we want.  */
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);
	  else if (code == WITH_RECORD_EXPR)
	    return exp;

	  /* Generic binary operator: note we deliberately fall into this
	     block from the checks above when none of them matched.  */
	  {
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 /* For subtraction the RHS contributes
				    with opposite sign.  */
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && (TREE_OVERFLOW (lhs)
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Either arm may apply, so the bound is the larger of the
	       bounds of the two arms.  */
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }

  /* Anything not handled above is an error in the caller.  */
  gigi_abort (408);
}
2174 \f
2175 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2176 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2177 Return a constructor for the template. */
2178
tree
build_template (template_type, array_type, expr)
     tree template_type;
     tree array_type;
     tree expr;
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip a padding record or a left-justified modular wrapper to get
     at the underlying array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.   Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     Each loop iteration consumes TWO template fields (the min and max
     field of one dimension) and advances either the bound list or the
     nested array type, whichever is driving the bounds.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      /* FIELD holds the low bound, TREE_CHAIN (field) the high bound.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 surround them with a WITH_RECORD_EXPR giving EXPR as the
	 OBJECT.  */
      if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
	min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
      if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
	max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);

      /* Entries are consed in reverse; the final nreverse restores the
	 field order of TEMPLATE_TYPE.  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return build_constructor (template_type, nreverse (template_elts));
}
2243 \f
2244 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2245 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2246 in the type contains in its DECL_INITIAL the expression to use when
2247 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2248 to print out an error message if the mechanism cannot be applied to
2249 an object of that type and also for the name. */
2250
tree
build_vms_descriptor (type, mech, gnat_entity)
     tree type;
     Mechanism_Type mech;
     Entity_Id gnat_entity;
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  Multi-dimensional
     Ada arrays are nested ARRAY_TYPEs marked with TYPE_MULTI_ARRAY_P.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Collect the index domains: Fortran-convention arrays store the
     dimensions in reverse order unless an NCA descriptor was asked for.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  The numeric codes below are presumably the
     VMS DSC$K_DTYPE_* codes for the corresponding data types — confirm
     against the OpenVMS descriptor documentation before changing any.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TREE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TREE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TREE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TREE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TREE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  The codes match the descriptor classes
     requested by MECH; anything unrecognized gets the scalar class 1.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  NOTE(review): for By_Descriptor_A,
     INNER_TYPE is only set when TYPE is an ARRAY_TYPE — presumably the
     callers guarantee that combination; verify before relying on it.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* The POINTER field is initialized with the address of the object
     being described; PLACEHOLDER_EXPR stands for that object.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  /* The remaining fields depend on the descriptor class.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* String descriptor with bounds: add the lower and upper bound.  */
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  Each coefficient is the
	 running product of the extents of the previous dimensions; for
	 NCA descriptors the accumulator TEM advances every iteration.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  /* Field names are S0,S1,... for NCA and M0,M1,... otherwise.  */
	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds, a (Li, Ui) pair per dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2536
2537 /* Utility routine for above code to make a field. */
2538
2539 static tree
2540 make_descriptor_field (name, type, rec_type, initial)
2541 const char *name;
2542 tree type;
2543 tree rec_type;
2544 tree initial;
2545 {
2546 tree field
2547 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2548
2549 DECL_INITIAL (field) = initial;
2550 return field;
2551 }
2552 \f
2553 /* Build a type to be used to represent an aliased object whose nominal
2554 type is an unconstrained array. This consists of a RECORD_TYPE containing
2555 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2556 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2557 is used to represent an arbitrary unconstrained object. Use NAME
2558 as the name of the record. */
2559
2560 tree
2561 build_unc_object_type (template_type, object_type, name)
2562 tree template_type;
2563 tree object_type;
2564 tree name;
2565 {
2566 tree type = make_node (RECORD_TYPE);
2567 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2568 template_type, type, 0, 0, 0, 1);
2569 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2570 type, 0, 0, 0, 1);
2571
2572 TYPE_NAME (type) = name;
2573 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2574 finish_record_type (type,
2575 chainon (chainon (NULL_TREE, template_field),
2576 array_field),
2577 0, 0);
2578
2579 return type;
2580 }
2581 \f
2582 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2583 the normal case this is just two adjustments, but we have more to do
2584 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2585
void
update_pointer_to (old_type, new_type)
     tree old_type;
     tree new_type;
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  Otherwise, get the new type with
     the same qualifiers as the old type and see if it is the same as the old
     type.  */
  if (ptr == 0 && ref == 0)
    return;

  new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
  if (old_type == new_type)
    return;

  /* First handle the simple case: retarget the existing pointer and
     reference types at NEW_TYPE and re-emit their debug info.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (ptr != 0)
	TREE_TYPE (ptr) = new_type;
      TYPE_POINTER_TO (new_type) = ptr;

      if (ref != 0)
	TREE_TYPE (ref) = new_type;
      TYPE_REFERENCE_TO (new_type) = ref;

      if (ptr != 0 && TYPE_NAME (ptr) != 0
	  && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				  global_bindings_p (), 0);
      if (ref != 0 && TYPE_NAME (ref) != 0
	  && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				  global_bindings_p (), 0);
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Graft the fat-pointer fields of the new type onto the old fat
	 pointer so existing references see the new layout.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point every variant of the fat pointer back at NEW_TYPE.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	TYPE_UNCONSTRAINED_ARRAY (var) = new_type;

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record size is the sum of the sizes of its two fields.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2699 \f
2700 /* Convert a pointer to a constrained array into a pointer to a fat
2701 pointer. This involves making or finding a template. */
2702
static tree
convert_to_fat_pointer (type, expr)
     tree type;
     tree expr;
{
  /* The second field of the fat pointer is a pointer to the bounds
     template; recover the template type from it.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Save EXPR first since it is dereferenced twice below.  */
      expr = save_expr (expr);
      /* Strip a direct &object instead of emitting a dereference.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The first field of the record is the template, the second is
	 the data; EXPR becomes the address of the data part.  */
      template = build_component_ref (expr, NULL_TREE, fields);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields)));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.  */
  return
    build_constructor (type,
		       tree_cons (TYPE_FIELDS (type), expr,
				  tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
					     template_addr, NULL_TREE)));
}
2755 \f
2756 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2757 is something that is a fat pointer, so convert to it first if it EXPR
2758 is not already a fat pointer. */
2759
2760 static tree
2761 convert_to_thin_pointer (type, expr)
2762 tree type;
2763 tree expr;
2764 {
2765 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2766 expr
2767 = convert_to_fat_pointer
2768 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2769
2770 /* We get the pointer to the data and use a NOP_EXPR to make it the
2771 proper GCC type. */
2772 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2773 expr = build1 (NOP_EXPR, type, expr);
2774
2775 return expr;
2776 }
2777 \f
2778 /* Create an expression whose value is that of EXPR,
2779 converted to type TYPE. The TREE_TYPE of the value
2780 is always TYPE. This function implements all reasonable
2781 conversions; callers should filter out those that are
2782 not permitted by the language being compiled. */
2783
/* GNAT-specific conversion of EXPR to TYPE: handles padded records,
   biased integer types, left-justified modular types, fat/thin pointers
   and unconstrained arrays before delegating to the generic
   convert_to_* routines.  */

tree
convert (type, expr)
     tree type, expr;
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  convert (type, TREE_OPERAND (expr, 0)),
		  TREE_OPERAND (expr, 1));

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion (handled by the
     ARRAY_TYPE/RECORD_TYPE cases of the switch at the end).  */
  if (ecode == RECORD_TYPE && code == RECORD_TYPE
      && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
      && (! TREE_CONSTANT (TYPE_SIZE (type))
	  || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						   TYPE_FIELDS (etype)));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
	       && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
	       && TREE_CODE (etype) == RECORD_TYPE)
	return unchecked_convert (type, expr);

      else
	return
	  build_constructor (type,
			     tree_cons (TYPE_FIELDS (type),
					convert (TREE_TYPE
						 (TYPE_FIELDS (type)),
						 expr),
					NULL_TREE));
    }

  /* If the input is a biased type, adjust first by adding back the bias
     (the type's minimum value).  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (GNAT_NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype)));

  /* If converting a type that does not contain a template into one
     that does, convert to the data type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
      && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      return
	build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  Likewise for a
	 CONSTRUCTOR.  But if the mode of the type is different, we must
	 ensure a new RTL is made for the constant.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;

	  if (TYPE_MODE (type) != TYPE_MODE (etype))
	    TREE_CST_RTL (expr) = 0;

	  return expr;
	}
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoids unneeded conversions, which would make reference
	 computations more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
	  && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
	  && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
	return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
		      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Strip a nested view conversion between aggregates; the outer
	 conversion supersedes it (but never strip through fat pointers).  */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* NOTE (review): `code' and `ecode' hold TREE_CODEs of *types*, so they
     can never equal INTEGER_CST (an expression code); the second disjunct
     below looks dead.  Possibly INTEGER_TYPE was intended -- verify
     before changing.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
	  && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
	return unchecked_convert (type, expr);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Converting to a biased representation: subtract the bias
	   (the type's minimum value) after converting to the base type.  */
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* A left-justified modular type is built from a scalar: wrap the
	 converted scalar in a one-field constructor.  */
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	if (TREE_TYPE (tem) == etype)
	  return build1 (CONVERT_EXPR, type, expr);

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3109 \f
3110 /* Remove all conversions that are done in EXP. This includes converting
3111 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3112 is nonzero, always return the address of the containing object even if
3113 the address is not bit-aligned. */
3114
3115 tree
3116 remove_conversions (exp, true_address)
3117 tree exp;
3118 int true_address;
3119 {
3120 switch (TREE_CODE (exp))
3121 {
3122 case CONSTRUCTOR:
3123 if (true_address
3124 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3125 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3126 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3127 break;
3128
3129 case COMPONENT_REF:
3130 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3131 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3132 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3133 break;
3134
3135 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3136 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3137 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3138
3139 default:
3140 break;
3141 }
3142
3143 return exp;
3144 }
3145 \f
3146 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3147 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3148 likewise return an expression pointing to the underlying array. */
3149
tree
maybe_unconstrained_array (exp)
     tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the P_ARRAY field of the fat pointer to reach the
	     underlying array; propagate readonly-ness to the new ref.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR with the type of the underlying array,
	   i.e. the type designated by the first field of the fat
	   pointer's designated record.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      else if (code == WITH_RECORD_EXPR
	       && (TREE_OPERAND (exp, 0)
		   != (new = maybe_unconstrained_array
		       (TREE_OPERAND (exp, 0)))))
	return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
		      TREE_OPERAND (exp, 1));

      /* NOTE (review): there is no `break' here, so when none of the
	 branches above returns, control falls through into the
	 RECORD_TYPE case below and tests TYPE_CONTAINS_TEMPLATE_P on an
	 UNCONSTRAINED_ARRAY_TYPE node.  Verify this fall-through is
	 intentional (later GCC revisions add a break).  */

    case RECORD_TYPE:
      if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	{
	  /* The data is the second field (after the template).  */
	  new
	    = build_component_ref (exp, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
	  /* If the data field is wrapped in a padding record, strip it.  */
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_IS_PADDING_P (TREE_TYPE (new)))
	    new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);

	  return new;
	}
      break;

    default:
      break;
    }

  return exp;
}
3204 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3206
tree
unchecked_convert (type, expr)
     tree type;
     tree expr;
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  unchecked_convert (type, TREE_OPERAND (expr, 0)),
		  TREE_OPERAND (expr, 1));

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* For a biased input, reinterpret the bits in an unbiased copy of
	 the input type first (GNAT_NOP_EXPR marks the bitwise
	 reinterpretation), so the conversion below is not biased.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      /* Likewise convert into an unbiased copy of a biased output type,
	 then reinterpret the result in TYPE below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr);
      expr = build_component_ref (expr, NULL_TREE, field);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size: wrap the input in a one-field record, then unchecked convert
     the record.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      /* General case: view-convert the bits in place.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
    {
      /* Extend by shifting the value left to the top of the machine
	 type, then shifting it back right (the right shift is
	 arithmetic or logical depending on BASE_TYPE's signedness).  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TREE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}