1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * $Revision: 1.6 $
10 * *
11 * Copyright (C) 1992-2001, Free Software Foundation, Inc. *
12 * *
13 * GNAT is free software; you can redistribute it and/or modify it under *
14 * terms of the GNU General Public License as published by the Free Soft- *
15 * ware Foundation; either version 2, or (at your option) any later ver- *
16 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
17 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
18 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
19 * for more details. You should have received a copy of the GNU General *
20 * Public License distributed with GNAT; see file COPYING. If not, write *
21 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
22 * MA 02111-1307, USA. *
23 * *
24 * GNAT was originally developed by the GNAT team at New York University. *
25 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
26 * *
27 ****************************************************************************/
28
29 #include "config.h"
30 #include "system.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "convert.h"
38
39 #include "ada.h"
40 #include "types.h"
41 #include "atree.h"
42 #include "elists.h"
43 #include "namet.h"
44 #include "nlists.h"
45 #include "stringt.h"
46 #include "uintp.h"
47 #include "fe.h"
48 #include "sinfo.h"
49 #include "einfo.h"
50 #include "ada-tree.h"
51 #include "gigi.h"
52
53 #ifndef MAX_FIXED_MODE_SIZE
54 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
55 #endif
56
57 #ifndef MAX_BITS_PER_WORD
58 #define MAX_BITS_PER_WORD BITS_PER_WORD
59 #endif
60
61 /* If nonzero, pretend we are allocating at global level. */
62 int force_global;
63
64 /* Global Variables for the various types we create. */
65 tree gnat_std_decls[(int) ADT_LAST];
66
67 /* Associates a GNAT tree node to a GCC tree node. It is used in
68 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
69 of `save_gnu_tree' for more info. */
70 static tree *associate_gnat_to_gnu;
71
72 /* This listhead is used to record any global objects that need elaboration.
73 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
74 initial value to assign. */
75
76 static tree pending_elaborations;
77
78 /* This stack allows us to momentarily switch to generating elaboration
79 lists for an inner context. */
80
81 static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;
82
83 /* This variable keeps a table of types for each precision so that we only
84 allocate each of them once. Signed and unsigned types are kept separate.
85
86 Note that these types are only used when fold-const requests something
87 special. Perhaps we should NOT share these types; we'll see how it
88 goes later. */
89 static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
90
91 /* Likewise for float types, but record these by mode. */
92 static tree float_types[NUM_MACHINE_MODES];
93
94 /* For each binding contour we allocate a binding_level structure which records
95 the entities defined or declared in that contour. Contours include:
96
97 the global one
98 one for each subprogram definition
99 one for each compound statement (declare block)
100
101 Binding contours are used to create GCC tree BLOCK nodes. */
102
103 struct binding_level
104 {
105 /* A chain of ..._DECL nodes for all variables, constants, functions,
106 parameters and type declarations. These ..._DECL nodes are chained
107 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
108 in the reverse of the order supplied to be compatible with the
109 back-end. */
110 tree names;
111 /* For each level (except the global one), a chain of BLOCK nodes for all
112 the levels that were entered and exited one level down from this one. */
113 tree blocks;
114 /* The BLOCK node for this level, if one has been preallocated.
115 If 0, the BLOCK is allocated (if needed) when the level is popped. */
116 tree this_block;
117 /* The binding level containing this one (the enclosing binding level). */
118 struct binding_level *level_chain;
119 };
120
121 /* The binding level currently in effect. */
122 static struct binding_level *current_binding_level = NULL;
123
124 /* A chain of binding_level structures awaiting reuse. */
125 static struct binding_level *free_binding_level = NULL;
126
127 /* The outermost binding level. This binding level is created when the
128 compiler is started and it will exist through the entire compilation. */
129 static struct binding_level *global_binding_level;
130
131 /* Binding level structures are initialized by copying this one. */
132 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
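/* An illustrative sketch (editorial note, not from the original sources) of
   how this machinery is typically used when translating a declare block:

       pushlevel (0);
       ... pushdecl each ..._DECL created for the block ...
       block = poplevel (1, 0, 0);

   The BLOCK returned by poplevel then becomes a subblock of the enclosing
   binding level.  */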
133
134
135 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
136 static tree compute_related_constant PARAMS ((tree, tree));
137 static tree split_plus PARAMS ((tree, tree *));
138 static int value_zerop PARAMS ((tree));
139 static tree float_type_for_size PARAMS ((int, enum machine_mode));
140 static tree convert_to_fat_pointer PARAMS ((tree, tree));
141 static tree convert_to_thin_pointer PARAMS ((tree, tree));
142 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
143 tree));
144 static void mark_binding_level PARAMS((PTR));
145 static void mark_e_stack PARAMS((PTR));
146 \f
147 /* Initialize the association of GNAT nodes to GCC trees. */
148
149 void
150 init_gnat_to_gnu ()
151 {
152 Node_Id gnat_node;
153
154 associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
155 ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);
156
157 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
158 associate_gnat_to_gnu [gnat_node] = NULL_TREE;
159
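  /* Bias the pointer so that the table can be indexed directly by Node_Id
     values, which start at First_Node_Id rather than at zero.  */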
160 associate_gnat_to_gnu -= First_Node_Id;
161
162 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
163 ggc_add_tree_root (&pending_elaborations, 1);
164 ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
165 ggc_add_tree_root (&signed_and_unsigned_types[0][0],
166 (sizeof signed_and_unsigned_types
167 / sizeof signed_and_unsigned_types[0][0]));
168 ggc_add_tree_root (float_types, sizeof float_types / sizeof float_types[0]);
169
170 ggc_add_root (&current_binding_level, 1, sizeof current_binding_level,
171 mark_binding_level);
172 }
173
174 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
175 which is to be associated with GNAT_ENTITY. Such a GCC tree node is always
176 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
177
178 If GNU_DECL is zero, a previous association is to be reset. */
179
180 void
181 save_gnu_tree (gnat_entity, gnu_decl, no_check)
182 Entity_Id gnat_entity;
183 tree gnu_decl;
184 int no_check;
185 {
186 if (gnu_decl
187 && (associate_gnat_to_gnu [gnat_entity]
188 || (! no_check && ! DECL_P (gnu_decl))))
189 gigi_abort (401);
190
191 associate_gnat_to_gnu [gnat_entity] = gnu_decl;
192 }
193
194 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
195 Return the ..._DECL node that was associated with it. If there is no tree
196 node associated with GNAT_ENTITY, abort.
197
198 In some cases, such as delayed elaboration or expressions that need to
199 be elaborated only once, GNAT_ENTITY is really not an entity. */
200
201 tree
202 get_gnu_tree (gnat_entity)
203 Entity_Id gnat_entity;
204 {
205 if (! associate_gnat_to_gnu [gnat_entity])
206 gigi_abort (402);
207
208 return associate_gnat_to_gnu [gnat_entity];
209 }
210
211 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
212
213 int
214 present_gnu_tree (gnat_entity)
215 Entity_Id gnat_entity;
216 {
217 return (associate_gnat_to_gnu [gnat_entity] != NULL_TREE);
218 }
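/* Illustrative usage of the three routines above (editorial note, not from
   the original sources): gigi calls save_gnu_tree (gnat_entity, gnu_decl, 0)
   when an entity is first translated, tests present_gnu_tree (gnat_entity)
   on later references, and retrieves the ..._DECL with
   get_gnu_tree (gnat_entity).  */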
219
220 \f
221 /* Return non-zero if we are currently in the global binding level. */
222
223 int
224 global_bindings_p ()
225 {
226 return (force_global != 0 || current_binding_level == global_binding_level
227 ? -1 : 0);
228 }
229
230 /* Return the list of declarations in the current level. Note that this list
231 is in reverse order (it has to be so for back-end compatibility). */
232
233 tree
234 getdecls ()
235 {
236 return current_binding_level->names;
237 }
238
239 /* Nonzero if the current level needs to have a BLOCK made. */
240
241 int
242 kept_level_p ()
243 {
244 return (current_binding_level->names != 0);
245 }
246
247 /* Enter a new binding level. The input parameter is ignored, but has to be
248 specified for back-end compatibility. */
249
250 void
251 pushlevel (ignore)
252 int ignore ATTRIBUTE_UNUSED;
253 {
254 struct binding_level *newlevel = NULL;
255
256 /* Reuse a struct for this binding level, if there is one. */
257 if (free_binding_level)
258 {
259 newlevel = free_binding_level;
260 free_binding_level = free_binding_level->level_chain;
261 }
262 else
263 newlevel
264 = (struct binding_level *) xmalloc (sizeof (struct binding_level));
265
266 *newlevel = clear_binding_level;
267
268 /* Add this level to the front of the chain (stack) of levels that are
269 active. */
270 newlevel->level_chain = current_binding_level;
271 current_binding_level = newlevel;
272 }
273
274 /* Exit a binding level.
275 Pop the level off, and restore the state of the identifier-decl mappings
276 that were in effect when this level was entered.
277
278 If KEEP is nonzero, this level had explicit declarations, so
279 create a "block" (a BLOCK node) for the level
280 to record its declarations and subblocks for symbol table output.
281
282 If FUNCTIONBODY is nonzero, this level is the body of a function,
283 so create a block as if KEEP were set and also clear out all
284 label names.
285
286 If REVERSE is nonzero, reverse the order of decls before putting
287 them into the BLOCK. */
288
289 tree
290 poplevel (keep, reverse, functionbody)
291 int keep;
292 int reverse;
293 int functionbody;
294 {
295 /* Points to a GCC BLOCK tree node. This is the BLOCK node constructed for the
296 binding level that we are about to exit and which is returned by this
297 routine. */
298 tree block = NULL_TREE;
299 tree decl_chain;
300 tree decl_node;
301 tree subblock_chain = current_binding_level->blocks;
302 tree subblock_node;
303 int block_previously_created;
304
305 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
306 nodes chained through the `names' field of current_binding_level are in
307 reverse order except for PARM_DECL nodes, which are explicitly stored in
308 the right order. */
309 current_binding_level->names
310 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
311 : current_binding_level->names;
312
313 /* Output any nested inline functions within this block which must be
314 compiled because their address is needed. */
315 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
316 if (TREE_CODE (decl_node) == FUNCTION_DECL
317 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
318 && DECL_INITIAL (decl_node) != 0)
319 {
320 push_function_context ();
321 output_inline_function (decl_node);
322 pop_function_context ();
323 }
324
325 block = 0;
326 block_previously_created = (current_binding_level->this_block != 0);
327 if (block_previously_created)
328 block = current_binding_level->this_block;
329 else if (keep || functionbody)
330 block = make_node (BLOCK);
331 if (block != 0)
332 {
333 BLOCK_VARS (block) = keep ? decl_chain : 0;
334 BLOCK_SUBBLOCKS (block) = subblock_chain;
335 }
336
337 /* Record the BLOCK node just built as the subblock of its enclosing scope. */
338 for (subblock_node = subblock_chain; subblock_node;
339 subblock_node = TREE_CHAIN (subblock_node))
340 BLOCK_SUPERCONTEXT (subblock_node) = block;
341
342 /* Clear out the meanings of the local variables of this level. */
343
344 for (subblock_node = decl_chain; subblock_node;
345 subblock_node = TREE_CHAIN (subblock_node))
346 if (DECL_NAME (subblock_node) != 0)
347 /* If the identifier was used or addressed via a local extern decl,
348 don't forget that fact. */
349 if (DECL_EXTERNAL (subblock_node))
350 {
351 if (TREE_USED (subblock_node))
352 TREE_USED (DECL_NAME (subblock_node)) = 1;
353 if (TREE_ADDRESSABLE (subblock_node))
354 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
355 }
356
357 {
358 /* Pop the current level, and free the structure for reuse. */
359 struct binding_level *level = current_binding_level;
360 current_binding_level = current_binding_level->level_chain;
361 level->level_chain = free_binding_level;
362 free_binding_level = level;
363 }
364
365 if (functionbody)
366 {
367 /* This is the top level block of a function. The ..._DECL chain stored
368 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
369 leave them in the BLOCK because they are found in the FUNCTION_DECL
370 instead. */
371 DECL_INITIAL (current_function_decl) = block;
372 BLOCK_VARS (block) = 0;
373 }
374 else if (block)
375 {
376 if (!block_previously_created)
377 current_binding_level->blocks
378 = chainon (current_binding_level->blocks, block);
379 }
380
381 /* If we did not make a block for the level just exited, any blocks made for
382 inner levels (since they cannot be recorded as subblocks in that level)
383 must be carried forward so they will later become subblocks of something
384 else. */
385 else if (subblock_chain)
386 current_binding_level->blocks
387 = chainon (current_binding_level->blocks, subblock_chain);
388 if (block)
389 TREE_USED (block) = 1;
390
391 return block;
392 }
393 \f
394 /* Insert BLOCK at the end of the list of subblocks of the
395 current binding level. This is used when a BIND_EXPR is expanded,
396 to handle the BLOCK node inside the BIND_EXPR. */
397
398 void
399 insert_block (block)
400 tree block;
401 {
402 TREE_USED (block) = 1;
403 current_binding_level->blocks
404 = chainon (current_binding_level->blocks, block);
405 }
406
407 /* Set the BLOCK node for the innermost scope
408 (the one we are currently in). */
409
410 void
411 set_block (block)
412 tree block;
413 {
414 current_binding_level->this_block = block;
415 current_binding_level->names = chainon (current_binding_level->names,
416 BLOCK_VARS (block));
417 current_binding_level->blocks = chainon (current_binding_level->blocks,
418 BLOCK_SUBBLOCKS (block));
419 }
420
421 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
422 Returns the ..._DECL node. */
423
424 tree
425 pushdecl (decl)
426 tree decl;
427 {
428 struct binding_level *b;
429
430 /* If at top level, there is no context. But PARM_DECLs always go in the
431 level of their function. */
432 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
433 {
434 b = global_binding_level;
435 DECL_CONTEXT (decl) = 0;
436 }
437 else
438 {
439 b = current_binding_level;
440 DECL_CONTEXT (decl) = current_function_decl;
441 }
442
443 /* Put the declaration on the list. The list of declarations is in reverse
444 order. The list will be reversed later if necessary. This needs to be
445 this way for compatibility with the back-end.
446
447 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
448 will cause trouble with the debugger and aren't needed anyway. */
449 if (TREE_CODE (decl) != TYPE_DECL
450 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
451 {
452 TREE_CHAIN (decl) = b->names;
453 b->names = decl;
454 }
455
456 /* For the declaration of a type, set its name if it either is not already
457 set, was set to an IDENTIFIER_NODE, indicating an internal name,
458 or if the previous type name was not derived from a source name.
459 We'd rather have the type named with a real name and all the pointer
460 types to the same object have the same POINTER_TYPE node. The corresponding
461 code in c-decl.c makes a copy of the type node here, but that may
462 cause us trouble with incomplete types, so let's not try it (at least
463 for now). */
464
465 if (TREE_CODE (decl) == TYPE_DECL
466 && DECL_NAME (decl) != 0
467 && (TYPE_NAME (TREE_TYPE (decl)) == 0
468 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
469 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
470 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
471 && ! DECL_ARTIFICIAL (decl))))
472 TYPE_NAME (TREE_TYPE (decl)) = decl;
473
474 return decl;
475 }
476 \f
477 /* Do little here. Set up the standard declarations later after the
478 front end has been run. */
479
480 void
481 gnat_init_decl_processing ()
482 {
483 lineno = 0;
484
485 /* incomplete_decl_finalize_hook is defined in toplev.c. It needs to be set
486 by each front end to the appropriate routine that handles incomplete
487 VAR_DECL nodes. This routine will be invoked by compile_file when a
488 VAR_DECL node of DECL_SIZE zero is encountered. */
489 incomplete_decl_finalize_hook = finish_incomplete_decl;
490
491 /* Make the binding_level structure for global names. */
492 current_function_decl = 0;
493 current_binding_level = 0;
494 free_binding_level = 0;
495 pushlevel (0);
496 global_binding_level = current_binding_level;
497
498 build_common_tree_nodes (0);
499
500 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
501 corresponding to the size of ptr_mode. Make this here since we need
502 this before we can expand the GNAT types. */
503 set_sizetype (type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
504 build_common_tree_nodes_2 (0);
505
506 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
507
508 /* We need to make the integer type before doing anything else.
509 We stitch this in to the appropriate GNAT type later. */
510 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
511 integer_type_node));
512 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
513 char_type_node));
514
515 ptr_void_type_node = build_pointer_type (void_type_node);
516
517 }
518
519 /* Create the predefined scalar types such as `integer_type_node' needed
520 in the gcc back-end and initialize the global binding level. */
521
522 void
523 init_gigi_decls (long_long_float_type, exception_type)
524 tree long_long_float_type, exception_type;
525 {
526 tree endlink;
527
528 /* Set the types that GCC and Gigi use from the front end. We would like
529 to do this for char_type_node, but it needs to correspond to the C
530 char type. */
531 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
532 {
533 /* In this case, the builtin floating point types are VAX float,
534 so make up a type for use. */
535 longest_float_type_node = make_node (REAL_TYPE);
536 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
537 layout_type (longest_float_type_node);
538 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
539 longest_float_type_node));
540 }
541 else
542 longest_float_type_node = TREE_TYPE (long_long_float_type);
543
544 except_type_node = TREE_TYPE (exception_type);
545
546 unsigned_type_node = type_for_size (INT_TYPE_SIZE, 1);
547 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
548 unsigned_type_node));
549
550 void_type_decl_node
551 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
552 void_type_node));
553
554 void_ftype = build_function_type (void_type_node, NULL_TREE);
555 ptr_void_ftype = build_pointer_type (void_ftype);
556
557 /* Now declare runtime functions. */
558 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
559
560 /* malloc is a function declaration tree for a function to allocate
561 memory. */
562 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
563 NULL_TREE,
564 build_function_type (ptr_void_type_node,
565 tree_cons (NULL_TREE,
566 sizetype,
567 endlink)),
568 NULL_TREE, 0, 1, 1, 0);
569
570 /* free is a function declaration tree for a function to free memory. */
571
572 free_decl
573 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
574 build_function_type (void_type_node,
575 tree_cons (NULL_TREE,
576 ptr_void_type_node,
577 endlink)),
578 NULL_TREE, 0, 1, 1, 0);
579
580 /* Make the types and functions used for exception processing. */
581 jmpbuf_type
582 = build_array_type (type_for_mode (Pmode, 0),
583 build_index_type (build_int_2 (5, 0)));
584 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
585 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
586
587 /* Functions to get and set the jumpbuf pointer for the current thread. */
588 get_jmpbuf_decl
589 = create_subprog_decl
590 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
591 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
592 NULL_TREE, 0, 1, 1, 0);
593
594 set_jmpbuf_decl
595 = create_subprog_decl
596 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
597 NULL_TREE,
598 build_function_type (void_type_node,
599 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
600 NULL_TREE, 0, 1, 1, 0);
601
602 /* Function to get the current exception. */
603 get_excptr_decl
604 = create_subprog_decl
605 (get_identifier ("system__soft_links__get_gnat_exception"),
606 NULL_TREE,
607 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
608 NULL_TREE, 0, 1, 1, 0);
609
610 /* Function that raises exceptions. */
611 raise_nodefer_decl
612 = create_subprog_decl
613 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
614 build_function_type (void_type_node,
615 tree_cons (NULL_TREE,
616 build_pointer_type (except_type_node),
617 endlink)),
618 NULL_TREE, 0, 1, 1, 0);
619
620
621 /* __gnat_raise_constraint_error takes a string, an integer and never
622 returns. */
623 raise_constraint_error_decl
624 = create_subprog_decl
625 (get_identifier ("__gnat_raise_constraint_error"), NULL_TREE,
626 build_function_type (void_type_node,
627 tree_cons (NULL_TREE,
628 build_pointer_type (char_type_node),
629 tree_cons (NULL_TREE,
630 integer_type_node,
631 endlink))),
632 NULL_TREE, 0, 1, 1, 0);
633
634 /* Likewise for __gnat_raise_program_error. */
635 raise_program_error_decl
636 = create_subprog_decl
637 (get_identifier ("__gnat_raise_program_error"), NULL_TREE,
638 build_function_type (void_type_node,
639 tree_cons (NULL_TREE,
640 build_pointer_type (char_type_node),
641 tree_cons (NULL_TREE,
642 integer_type_node,
643 endlink))),
644 NULL_TREE, 0, 1, 1, 0);
645
646 /* Likewise for __gnat_raise_storage_error. */
647 raise_storage_error_decl
648 = create_subprog_decl
649 (get_identifier ("__gnat_raise_storage_error"), NULL_TREE,
650 build_function_type (void_type_node,
651 tree_cons (NULL_TREE,
652 build_pointer_type (char_type_node),
653 tree_cons (NULL_TREE,
654 integer_type_node,
655 endlink))),
656 NULL_TREE, 0, 1, 1, 0);
657
658 /* Indicate that these never return. */
659
660 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
661 TREE_THIS_VOLATILE (raise_constraint_error_decl) = 1;
662 TREE_THIS_VOLATILE (raise_program_error_decl) = 1;
663 TREE_THIS_VOLATILE (raise_storage_error_decl) = 1;
664
665 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
666 TREE_SIDE_EFFECTS (raise_constraint_error_decl) = 1;
667 TREE_SIDE_EFFECTS (raise_program_error_decl) = 1;
668 TREE_SIDE_EFFECTS (raise_storage_error_decl) = 1;
669
670 TREE_TYPE (raise_nodefer_decl)
671 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
672 TYPE_QUAL_VOLATILE);
673 TREE_TYPE (raise_constraint_error_decl)
674 = build_qualified_type (TREE_TYPE (raise_constraint_error_decl),
675 TYPE_QUAL_VOLATILE);
676 TREE_TYPE (raise_program_error_decl)
677 = build_qualified_type (TREE_TYPE (raise_program_error_decl),
678 TYPE_QUAL_VOLATILE);
679 TREE_TYPE (raise_storage_error_decl)
680 = build_qualified_type (TREE_TYPE (raise_storage_error_decl),
681 TYPE_QUAL_VOLATILE);
682
683 /* setjmp returns an integer and has one operand, which is a pointer to
684 a jmpbuf. */
685 setjmp_decl
686 = create_subprog_decl
687 (get_identifier ("setjmp"), NULL_TREE,
688 build_function_type (integer_type_node,
689 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
690 NULL_TREE, 0, 1, 1, 0);
691
692 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
693 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
694
695 ggc_add_tree_root (gnat_std_decls,
696 sizeof gnat_std_decls / sizeof gnat_std_decls[0]);
697 }
698 \f
699 /* This routine is called in tree.c to print an error message for invalid use
700 of an incomplete type. */
701
702 void
703 incomplete_type_error (dont_care_1, dont_care_2)
704 tree dont_care_1 ATTRIBUTE_UNUSED;
705 tree dont_care_2 ATTRIBUTE_UNUSED;
706 {
707 gigi_abort (404);
708 }
709
710 /* This function is called indirectly from toplev.c to handle incomplete
711 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
712 compile_file in toplev.c makes an indirect call through the function pointer
713 incomplete_decl_finalize_hook which is initialized to this routine in
714 gnat_init_decl_processing. */
715
716 void
717 finish_incomplete_decl (dont_care)
718 tree dont_care ATTRIBUTE_UNUSED;
719 {
720 gigi_abort (405);
721 }
722 \f
723 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
724 nodes (FIELDLIST), finish constructing the record or union type.
725 If HAS_REP is nonzero, this record has a rep clause; don't call
726 layout_type but merely set the size and alignment ourselves.
727 If DEFER_DEBUG is nonzero, do not call the debugging routines
728 on this type; it will be done later. */
729
730 void
731 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
732 tree record_type;
733 tree fieldlist;
734 int has_rep;
735 int defer_debug;
736 {
737 enum tree_code code = TREE_CODE (record_type);
738 tree ada_size = bitsize_zero_node;
739 tree size = bitsize_zero_node;
740 tree size_unit = size_zero_node;
741 tree field;
742
743 TYPE_FIELDS (record_type) = fieldlist;
744
745 if (TYPE_NAME (record_type) != 0
746 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
747 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
748 else
749 TYPE_STUB_DECL (record_type)
750 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
751 record_type));
752
753 /* We don't need both the typedef name and the record name output in
754 the debugging information, since they are the same. */
755 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
756
757 /* Globally initialize the record first. If this is a rep'ed record,
758 that just means some initializations; otherwise, layout the record. */
759
760 if (has_rep)
761 {
762 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
763 TYPE_MODE (record_type) = BLKmode;
764 if (TYPE_SIZE (record_type) == 0)
765 {
766 TYPE_SIZE (record_type) = bitsize_zero_node;
767 TYPE_SIZE_UNIT (record_type) = size_zero_node;
768 }
769 }
770 else
771 {
772 /* Ensure there isn't a size already set. There can be in an error
773 case where there is a rep clause but all fields have errors and
774 no longer have a position. */
775 TYPE_SIZE (record_type) = 0;
776 layout_type (record_type);
777 }
778
779 /* At this point, the position and size of each field is known. It was
780 either set before entry by a rep clause, or by laying out the type
781 above. We now make a pass through the fields (in reverse order for
782 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
783 (for rep'ed records that are not padding types); and the mode (for
784 rep'ed records). */
785
786 if (code == QUAL_UNION_TYPE)
787 fieldlist = nreverse (fieldlist);
788
789 for (field = fieldlist; field; field = TREE_CHAIN (field))
790 {
791 tree type = TREE_TYPE (field);
792 tree this_size = DECL_SIZE (field);
793 tree this_size_unit = DECL_SIZE_UNIT (field);
794 tree this_ada_size = DECL_SIZE (field);
795
796 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
797 || TREE_CODE (type) == QUAL_UNION_TYPE)
798 && ! TYPE_IS_FAT_POINTER_P (type)
799 && ! TYPE_CONTAINS_TEMPLATE_P (type)
800 && TYPE_ADA_SIZE (type) != 0)
801 this_ada_size = TYPE_ADA_SIZE (type);
802
803 if (has_rep && ! DECL_BIT_FIELD (field))
804 TYPE_ALIGN (record_type)
805 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
806
807 switch (code)
808 {
809 case UNION_TYPE:
810 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
811 size = size_binop (MAX_EXPR, size, this_size);
812 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
813 break;
814
815 case QUAL_UNION_TYPE:
816 ada_size
817 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
818 this_ada_size, ada_size));
819 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
820 this_size, size));
821 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
822 this_size_unit, size_unit));
823 break;
824
825 case RECORD_TYPE:
826 /* Since we know here that all fields are sorted in order of
827 increasing bit position, the size of the record is one
828 higher than the ending bit of the last field processed
829 unless we have a rep clause, since in that case we might
830 have a field outside a QUAL_UNION_TYPE that has a higher ending
831 position. So use a MAX in that case. Also, if this field is a
832 QUAL_UNION_TYPE, we need to take into account the previous size in
833 the case of empty variants. */
834 ada_size
835 = merge_sizes (ada_size, bit_position (field), this_ada_size,
836 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
837 size = merge_sizes (size, bit_position (field), this_size,
838 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
839 size_unit
840 = merge_sizes (size_unit, byte_position (field), this_size_unit,
841 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
842 break;
843
844 default:
845 abort ();
846 }
847 }
848
849 if (code == QUAL_UNION_TYPE)
850 nreverse (fieldlist);
851
852 /* If this is a padding record, we never want to make the size smaller than
853 what was specified in it, if any. */
854 if (TREE_CODE (record_type) == RECORD_TYPE
855 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
856 {
857 size = TYPE_SIZE (record_type);
858 size_unit = TYPE_SIZE_UNIT (record_type);
859 }
860
861 /* Now set any of the values we've just computed that apply. */
862 if (! TYPE_IS_FAT_POINTER_P (record_type)
863 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
864 TYPE_ADA_SIZE (record_type) = ada_size;
865
866 #ifdef ROUND_TYPE_SIZE
867 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
868 size_unit = ROUND_TYPE_SIZE_UNIT (record_type, size_unit,
869 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
870 #else
871 size = round_up (size, TYPE_ALIGN (record_type));
872 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
873 #endif
874
875 if (has_rep
876 && ! (TREE_CODE (record_type) == RECORD_TYPE
877 && TYPE_IS_PADDING_P (record_type)
878 && TREE_CODE (size) != INTEGER_CST
879 && contains_placeholder_p (size)))
880 {
881 TYPE_SIZE (record_type) = size;
882 TYPE_SIZE_UNIT (record_type) = size_unit;
883 }
884
885 if (has_rep)
886 compute_record_mode (record_type);
887
888 if (! defer_debug)
889 {
890 /* If this record is of variable size, rename it so that the
891 debugger knows it is and make a new, parallel, record
892 that tells the debugger how the record is laid out. See
893 exp_dbug.ads. */
894 if (TREE_CODE (TYPE_SIZE (record_type)) != INTEGER_CST)
895 {
896 tree new_record_type
897 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
898 ? UNION_TYPE : TREE_CODE (record_type));
899 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
900 tree new_id
901 = concat_id_with_name (orig_id,
902 TREE_CODE (record_type) == QUAL_UNION_TYPE
903 ? "XVU" : "XVE");
904 tree last_pos = bitsize_zero_node;
905 tree old_field;
906
907 TYPE_NAME (new_record_type) = new_id;
908 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
909 TYPE_STUB_DECL (new_record_type)
910 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
911 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
912 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
913 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
914 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
915
916 /* Now scan all the fields, replacing each field with a new
917 field corresponding to the new encoding. */
918 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
919 old_field = TREE_CHAIN (old_field))
920 {
921 tree field_type = TREE_TYPE (old_field);
922 tree field_name = DECL_NAME (old_field);
923 tree new_field;
924 tree curpos = bit_position (old_field);
925 int var = 0;
926 unsigned int align = 0;
927 tree pos;
928
929 /* See how the position was modified from the last position.
930
931 There are two basic cases we support: a value was added
932 to the last position or the last position was rounded to
933 a boundary and then something was added. Check for the
934 first case first. If not, see if there is any evidence
935 of rounding. If so, round the last position and try
936 again.
937
938 If this is a union, the position can be taken as zero. */
939
940 if (TREE_CODE (new_record_type) == UNION_TYPE)
941 pos = bitsize_zero_node, align = 0;
942 else
943 pos = compute_related_constant (curpos, last_pos);
944
945 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
946 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
947 {
948 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
949 pos = compute_related_constant (curpos,
950 round_up (last_pos, align));
951 }
952 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
953 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
954 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
955 && host_integerp (TREE_OPERAND
956 (TREE_OPERAND (curpos, 0), 1),
957 1))
958 {
959 align
960 = tree_low_cst
961 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
962 pos = compute_related_constant (curpos,
963 round_up (last_pos, align));
964 }
965
966 /* If we can't compute a position, set it to zero.
967
968 ??? We really should abort here, but it's too much work
969 to get this correct for all cases. */
970
971 if (pos == 0)
972 pos = bitsize_zero_node;
973
974 /* See if this type is variable-size and make a new type
975 and indicate the indirection if so. */
976 if (TREE_CODE (TYPE_SIZE (field_type)) != INTEGER_CST)
977 {
978 field_type = build_pointer_type (field_type);
979 var = 1;
980 }
981
982 /* Make a new field name, if necessary. */
983 if (var || align != 0)
984 {
985 char suffix[6];
986
987 if (align != 0)
988 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
989 align / BITS_PER_UNIT);
990 else
991 strcpy (suffix, "XVL");
992
993 field_name = concat_id_with_name (field_name, suffix);
994 }
995
996 new_field = create_field_decl (field_name, field_type,
997 new_record_type, 0,
998 TYPE_SIZE (field_type), pos, 0);
999 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1000 TYPE_FIELDS (new_record_type) = new_field;
1001
1002 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1003 zero. The only time it's not the last field of the record
1004 is when there are other components at fixed positions after
1005 it (meaning there was a rep clause for every field) and we
1006 want to be able to encode them. */
1007 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1008 (TREE_CODE (TREE_TYPE (old_field))
1009 == QUAL_UNION_TYPE)
1010 ? bitsize_zero_node
1011 : TYPE_SIZE (TREE_TYPE (old_field)));
1012 }
1013
1014 TYPE_FIELDS (new_record_type)
1015 = nreverse (TYPE_FIELDS (new_record_type));
1016
1017 rest_of_type_compilation (new_record_type, global_bindings_p ());
1018 }
1019
1020 rest_of_type_compilation (record_type, global_bindings_p ());
1021 }
1022 }
1023
1024 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1025 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1026 if this represents a QUAL_UNION_TYPE in which case we must look for
1027 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1028 is nonzero, we must take the MAX of the end position of this field
1029 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1030
1031 We return an expression for the size. */
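/* Illustrative restatement of the rules above (editorial note): for a field
   whose SIZE is not a COND_EXPR, the result is FIRST_BIT + SIZE without a
   rep clause, and MAX (LAST_SIZE, FIRST_BIT + SIZE) with one, so a field
   placed before the current end of the record cannot shrink it.  */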
1032
1033 static tree
1034 merge_sizes (last_size, first_bit, size, special, has_rep)
1035 tree last_size;
1036 tree first_bit, size;
1037 int special;
1038 int has_rep;
1039 {
1040 tree type = TREE_TYPE (last_size);
1041
1042 if (! special || TREE_CODE (size) != COND_EXPR)
1043 {
1044 tree new = size_binop (PLUS_EXPR, first_bit, size);
1045
1046 if (has_rep)
1047 new = size_binop (MAX_EXPR, last_size, new);
1048
1049 return new;
1050 }
1051
1052 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1053 integer_zerop (TREE_OPERAND (size, 1))
1054 ? last_size : merge_sizes (last_size, first_bit,
1055 TREE_OPERAND (size, 1),
1056 1, has_rep),
1057 integer_zerop (TREE_OPERAND (size, 2))
1058 ? last_size : merge_sizes (last_size, first_bit,
1059 TREE_OPERAND (size, 2),
1060 1, has_rep)));
1061 }
1062
1063 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1064 related by the addition of a constant. Return that constant if so. */
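/* For example (editorial, illustrative only): if OP0 is `V + 10' and OP1 is
   `V + 2', the result is the constant 8; if the variable parts cannot be
   matched up, the function returns 0.  */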
1065
1066 static tree
1067 compute_related_constant (op0, op1)
1068 tree op0, op1;
1069 {
1070 tree op0_var, op1_var;
1071 tree op0_con = split_plus (op0, &op0_var);
1072 tree op1_con = split_plus (op1, &op1_var);
1073 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1074
1075 if (operand_equal_p (op0_var, op1_var, 0))
1076 return result;
1077 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1078 return result;
1079 else
1080 return 0;
1081 }
1082
1083 /* Utility function of above to split a tree OP which may be a sum, into a
1084 constant part, which is returned, and a variable part, which is stored
1085 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1086 sizetype. */
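/* For example (editorial, illustrative only): for IN equal to `A + 3 - B',
   split_plus returns 3 and stores `A - B' in *PVAR.  */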
1087
1088 static tree
1089 split_plus (in, pvar)
1090 tree in;
1091 tree *pvar;
1092 {
1093 tree result = bitsize_zero_node;
1094
1095 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1096 in = TREE_OPERAND (in, 0);
1097
1098 *pvar = in;
1099 if (TREE_CODE (in) == INTEGER_CST)
1100 {
1101 *pvar = bitsize_zero_node;
1102 return in;
1103 }
1104 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1105 {
1106 tree lhs_var, rhs_var;
1107 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1108 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1109
1110 result = size_binop (PLUS_EXPR, result, lhs_con);
1111 result = size_binop (TREE_CODE (in), result, rhs_con);
1112
1113 if (lhs_var == TREE_OPERAND (in, 0)
1114 && rhs_var == TREE_OPERAND (in, 1))
1115 return bitsize_zero_node;
1116
1117 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1118 return result;
1119 }
1120 else
1121 return bitsize_zero_node;
1122 }
1123 \f
1124 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1125 subprogram. If it is void_type_node, then we are dealing with a procedure,
1126 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1127 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1128 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1129 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1130 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1131 RETURNS_WITH_DSP is nonzero if the function is to return with a
1132 depressed stack pointer. */
1133
1134 tree
1135 create_subprog_type (return_type, param_decl_list, cico_list,
1136 returns_unconstrained, returns_by_ref, returns_with_dsp)
1137 tree return_type;
1138 tree param_decl_list;
1139 tree cico_list;
1140 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1141 {
1142 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1143 the subprogram formal parameters. This list is generated by traversing the
1144 input list of PARM_DECL nodes. */
1145 tree param_type_list = NULL;
1146 tree param_decl;
1147 tree type;
1148
1149 for (param_decl = param_decl_list; param_decl;
1150 param_decl = TREE_CHAIN (param_decl))
1151 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1152 param_type_list);
1153
1154 /* The list of the function parameter types has to be terminated by the void
1155 type to signal to the back-end that we are not dealing with a variable
1156 parameter subprogram, but that the subprogram has a fixed number of
1157 parameters. */
1158 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1159
1160 /* The list of argument types has been created in reverse
1161 so nreverse it. */
1162 param_type_list = nreverse (param_type_list);
1163
1164 type = build_function_type (return_type, param_type_list);
1165
1166 /* TYPE may have been shared since GCC hashes types. If it already has a
1167 CICO_LIST or the new type should have one, make a copy of TYPE. Likewise
1168 for RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1169 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1170 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1171 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1172 type = copy_type (type);
1173
1174 TYPE_CI_CO_LIST (type) = cico_list;
1175 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1176 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1177 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1178 return type;
1179 }
1180 \f
1181 /* Return a copy of TYPE but safe to modify in any way. */
1182
1183 tree
1184 copy_type (type)
1185 tree type;
1186 {
1187 tree new = copy_node (type);
1188
1189 /* copy_node clears this field instead of copying it, because it is
1190 aliased with TREE_CHAIN. */
1191 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1192
1193 TYPE_POINTER_TO (new) = 0;
1194 TYPE_REFERENCE_TO (new) = 0;
1195 TYPE_MAIN_VARIANT (new) = new;
1196 TYPE_NEXT_VARIANT (new) = 0;
1197
1198 return new;
1199 }
1200 \f
1201 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1202 TYPE_INDEX_TYPE is INDEX. */
1203
1204 tree
1205 create_index_type (min, max, index)
1206 tree min, max;
1207 tree index;
1208 {
1209 /* First build a type for the desired range. */
1210 tree type = build_index_2_type (min, max);
1211
1212 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1213 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1214 is set, but not to INDEX, make a copy of this type with the requested
1215 index type. Note that we have no way of sharing these types, but that's
1216 only a small hole. */
1217 if (TYPE_INDEX_TYPE (type) == index)
1218 return type;
1219 else if (TYPE_INDEX_TYPE (type) != 0)
1220 type = copy_type (type);
1221
1222 TYPE_INDEX_TYPE (type) = index;
1223 return type;
1224 }
1225 \f
1226 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1227 string) and TYPE is a ..._TYPE node giving its data type.
1228 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1229 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1230 information about this type. */
1231
1232 tree
1233 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1234 tree type_name;
1235 tree type;
1236 struct attrib *attr_list;
1237 int artificial_p;
1238 int debug_info_p;
1239 {
1240 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1241 enum tree_code code = TREE_CODE (type);
1242
1243 DECL_ARTIFICIAL (type_decl) = artificial_p;
1244 pushdecl (type_decl);
1245 process_attributes (type_decl, attr_list);
1246
1247 /* Pass type declaration information to the debugger unless this is an
1248 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1249 an ENUMERAL_TYPE or RECORD_TYPE, which are handled separately,
1250 a dummy type, which will be completed later, or a type for which
1251 debugging information was not requested. */
1252 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1253 || ! debug_info_p)
1254 DECL_IGNORED_P (type_decl) = 1;
1255 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1256 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1257 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1258 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1259
1260 return type_decl;
1261 }
1262
1263 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1264 ASM_NAME is its assembler name (if provided). TYPE is its data type
1265 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1266 expression; NULL_TREE if none.
1267
1268 CONST_FLAG is nonzero if this variable is constant.
1269
1270 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1271 the current compilation unit. This flag should be set when processing the
1272 variable definitions in a package specification. EXTERN_FLAG is nonzero
1273 when processing an external variable declaration (as opposed to a
1274 definition: no storage is to be allocated for the variable here).
1275
1276 STATIC_FLAG is only relevant when not at top level. In that case
1277 it indicates whether to always allocate storage to the variable. */
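/* An illustrative call (editorial note, not from the original sources):

       create_var_decl (get_identifier ("counter"), NULL_TREE,
                        integer_type_node, NULL_TREE, 0, 0, 0, 0, NULL);

   creates an ordinary, non-constant, non-exported integer variable with no
   initializer, whose storage is static at the global binding level and
   automatic otherwise.  */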
1278
1279 tree
1280 create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
1281 extern_flag, static_flag, attr_list)
1282 tree var_name;
1283 tree asm_name;
1284 tree type;
1285 tree var_init;
1286 int const_flag;
1287 int public_flag;
1288 int extern_flag;
1289 int static_flag;
1290 struct attrib *attr_list;
1291 {
1292 int init_const
1293 = (var_init == 0
1294 ? 0
1295 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1296 && (global_bindings_p () || static_flag
1297 ? 0 != initializer_constant_valid_p (var_init,
1298 TREE_TYPE (var_init))
1299 : TREE_CONSTANT (var_init))));
1300 tree var_decl
1301 = build_decl ((const_flag && init_const
1302 /* Only make a CONST_DECL for sufficiently-small objects.
1303 We consider complex double "sufficiently-small". */
1304 && TYPE_SIZE (type) != 0
1305 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1306 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1307 GET_MODE_SIZE (DCmode)))
1308 ? CONST_DECL : VAR_DECL, var_name, type);
1309 tree assign_init = 0;
1310
1311 /* If this is external, throw away any initializations unless this is a
1312 CONST_DECL (meaning we have a constant); they will be done elsewhere. If
1313 we are defining a global here, leave a constant initialization and save
1314 any variable elaborations for the elaboration routine. Otherwise, if
1315 the initializing expression is not the same as TYPE, generate the
1316 initialization with an assignment statement, since it knows how
1317 to do the required adjustments. If we are just annotating types,
1318 throw away the initialization if it isn't a constant. */
1319
1320 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1321 || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
1322 var_init = 0;
1323
1324 if (global_bindings_p () && var_init != 0 && ! init_const)
1325 {
1326 add_pending_elaborations (var_decl, var_init);
1327 var_init = 0;
1328 }
1329
1330 else if (var_init != 0
1331 && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1332 != TYPE_MAIN_VARIANT (type))
1333 || (static_flag && ! init_const)))
1334 assign_init = var_init, var_init = 0;
1335
1336 DECL_COMMON (var_decl) = !flag_no_common;
1337 DECL_INITIAL (var_decl) = var_init;
1338 TREE_READONLY (var_decl) = const_flag;
1339 DECL_EXTERNAL (var_decl) = extern_flag;
1340 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1341 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1342 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1343 = TYPE_VOLATILE (type);
1344
1345 /* At the global binding level we need to allocate static storage for the
1346 variable if and only if it is not external. If we are not at the top level
1347 we allocate automatic storage unless requested not to. */
1348 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1349
1350 if (asm_name != 0)
1351 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1352
1353 process_attributes (var_decl, attr_list);
1354
1355 /* Add this decl to the current binding level and generate any
1356 needed code and RTL. */
1357 var_decl = pushdecl (var_decl);
1358 expand_decl (var_decl);
1359
1360 if (DECL_CONTEXT (var_decl) != 0)
1361 expand_decl_init (var_decl);
1362
1363 /* If this is volatile, force it into memory. */
1364 if (TREE_SIDE_EFFECTS (var_decl))
1365 mark_addressable (var_decl);
1366
1367 if (TREE_CODE (var_decl) != CONST_DECL)
1368 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1369
1370 if (assign_init != 0)
1371 {
1372 /* If VAR_DECL has a padded type, convert it to the unpadded
1373 type so the assignment is done properly. */
1374 tree lhs = var_decl;
1375
1376 if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
1377 && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
1378 lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);
1379
1380 expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
1381 assign_init));
1382 }
1383
1384 return var_decl;
1385 }
1386 \f
1387 /* Returns a FIELD_DECL node. FIELD_NAME is the field name, FIELD_TYPE is its
1388 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1389 this field is in a record type with a "pragma pack". If SIZE is nonzero
1390 it is the specified size for this field. If POS is nonzero, it is the bit
1391 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1392 the address of this field for aliasing purposes. */
1393
1394 tree
1395 create_field_decl (field_name, field_type, record_type, packed, size, pos,
1396 addressable)
1397 tree field_name;
1398 tree field_type;
1399 tree record_type;
1400 int packed;
1401 tree size, pos;
1402 int addressable;
1403 {
1404 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1405
1406 DECL_CONTEXT (field_decl) = record_type;
1407 TREE_READONLY (field_decl) = TREE_READONLY (field_type);
1408
1409 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1410 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1411 If it is a padding type where the inner field is of variable size, it
1412 must be at its natural alignment. Just handle the packed case here; we
1413 will disallow non-aligned rep clauses elsewhere. */
1414 if (packed && TYPE_MODE (field_type) == BLKmode)
1415 DECL_ALIGN (field_decl)
1416 = ((TREE_CODE (field_type) == RECORD_TYPE
1417 && TYPE_IS_PADDING_P (field_type)
1418 && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
1419 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);
1420
1421 /* If a size is specified, use it. Otherwise, see if we have a size
1422 to use that may differ from the natural size of the object. */
1423 if (size != 0)
1424 size = convert (bitsizetype, size);
1425 else if (packed)
1426 {
1427 if (packed == 1 && ! operand_equal_p (rm_size (field_type),
1428 TYPE_SIZE (field_type), 0))
1429 size = rm_size (field_type);
1430
1431 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round it up to a
1432 byte boundary. */
1433 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1434 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1435 size = round_up (size, BITS_PER_UNIT);
1436 }
1437
1438 /* Make a bitfield if a size is specified, for two reasons: first, if the
1439 size differs from the natural size; second, if the alignment is insufficient.
1440 There are a number of ways the latter can be true. But never make a
1441 bitfield if the type of the field has a nonconstant size. */
1442
1443 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1444 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1445 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1446 || (pos != 0
1447 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1448 bitsize_int (TYPE_ALIGN
1449 (field_type)))))
1450 || packed
1451 || (TYPE_ALIGN (record_type) != 0
1452 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1453 {
1454 DECL_BIT_FIELD (field_decl) = 1;
1455 DECL_SIZE (field_decl) = size;
1456 if (! packed && pos == 0)
1457 DECL_ALIGN (field_decl)
1458 = (TYPE_ALIGN (record_type) != 0
1459 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1460 : TYPE_ALIGN (field_type));
1461 }
1462
1463 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1464 DECL_ALIGN (field_decl)
1465 = MAX (DECL_ALIGN (field_decl),
1466 DECL_BIT_FIELD (field_decl) ? 1
1467 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1468 : TYPE_ALIGN (field_type));
1469
1470 if (pos != 0)
1471 {
1472 /* We need to pass in the alignment the DECL is known to have.
1473 This is the lowest-order bit set in POS, but no more than
1474 the alignment of the record, if one is specified. Note
1475 that an alignment of 0 is taken as infinite. */
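      /* For example (editorial, illustrative only): a constant POS of 24
         bits yields a known alignment of 8 bits, since 24 & -24 == 8.  */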
1476 unsigned int known_align;
1477
1478 if (host_integerp (pos, 1))
1479 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1480 else
1481 known_align = BITS_PER_UNIT;
1482
1483 if (TYPE_ALIGN (record_type)
1484 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1485 known_align = TYPE_ALIGN (record_type);
1486
1487 layout_decl (field_decl, known_align);
1488 SET_DECL_OFFSET_ALIGN (field_decl, BIGGEST_ALIGNMENT);
1489 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1490 &DECL_FIELD_BIT_OFFSET (field_decl),
1491 BIGGEST_ALIGNMENT, pos);
1492
1493 DECL_HAS_REP_P (field_decl) = 1;
1494 }
1495
1496 /* Mark the decl as nonaddressable if it is either indicated so semantically
1497 or is a bit field. */
1498 DECL_NONADDRESSABLE_P (field_decl)
1499 = ! addressable || DECL_BIT_FIELD (field_decl);
1500
1501 return field_decl;
1502 }
1503
1504 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1505 effects, has the value of zero. */
1506
1507 static int
1508 value_zerop (exp)
1509 tree exp;
1510 {
1511 if (TREE_CODE (exp) == COMPOUND_EXPR)
1512 return value_zerop (TREE_OPERAND (exp, 1));
1513
1514 return integer_zerop (exp);
1515 }
1516 \f
1517 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1518 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1519 readonly (either an IN parameter or an address of a pass-by-ref
1520 parameter). */
1521
1522 tree
1523 create_param_decl (param_name, param_type, readonly)
1524 tree param_name;
1525 tree param_type;
1526 int readonly;
1527 {
1528 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1529
1530 DECL_ARG_TYPE (param_decl) = param_type;
1531 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1532 TREE_READONLY (param_decl) = readonly;
1533 return param_decl;
1534 }
1535 \f
1536 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1537
1538 void
1539 process_attributes (decl, attr_list)
1540 tree decl;
1541 struct attrib *attr_list;
1542 {
1543 for (; attr_list; attr_list = attr_list->next)
1544 switch (attr_list->type)
1545 {
1546 case ATTR_MACHINE_ATTRIBUTE:
1547 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1548 NULL_TREE),
1549 ATTR_FLAG_TYPE_IN_PLACE);
1550 break;
1551
1552 case ATTR_LINK_ALIAS:
1553 TREE_STATIC (decl) = 1;
1554 assemble_alias (decl, attr_list->name);
1555 break;
1556
1557 case ATTR_WEAK_EXTERNAL:
1558 if (SUPPORTS_WEAK)
1559 declare_weak (decl);
1560 else
1561 post_error ("?weak declarations not supported on this target",
1562 attr_list->error_point);
1563 break;
1564
1565 case ATTR_LINK_SECTION:
1566 #ifdef ASM_OUTPUT_SECTION_NAME
1567 DECL_SECTION_NAME (decl)
1568 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1569 IDENTIFIER_POINTER (attr_list->name));
1570 DECL_COMMON (decl) = 0;
1571 #else
1572 post_error ("?section attributes are not supported for this target",
1573 attr_list->error_point);
1574 #endif
1575 break;
1576 }
1577 }
1578 \f
1579 /* Add some pending elaborations on the list. */
1580
1581 void
1582 add_pending_elaborations (var_decl, var_init)
1583 tree var_decl;
1584 tree var_init;
1585 {
1586 if (var_init != 0)
1587 Check_Elaboration_Code_Allowed (error_gnat_node);
1588
1589 pending_elaborations
1590 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1591 }
1592
1593 /* Obtain any pending elaborations and clear the old list. */
1594
1595 tree
1596 get_pending_elaborations ()
1597 {
1598 /* Each thing added to the list went on the end; we want it on the
1599 beginning. */
1600 tree result = TREE_CHAIN (pending_elaborations);
1601
1602 TREE_CHAIN (pending_elaborations) = 0;
1603 return result;
1604 }
1605
1606 /* Mark the binding level stack. */
1607
1608 static void
1609 mark_binding_level (arg)
1610 PTR arg;
1611 {
1612 struct binding_level *level = *(struct binding_level **) arg;
1613
1614 for (; level != 0; level = level->level_chain)
1615 {
1616 ggc_mark_tree (level->names);
1617 ggc_mark_tree (level->blocks);
1618 ggc_mark_tree (level->this_block);
1619 }
1620 }
1621
1622 /* Mark the pending elaboration list. */
1623
1624 static void
1625 mark_e_stack (data)
1626 PTR data;
1627 {
1628 struct e_stack *p = *((struct e_stack **) data);
1629
1630 if (p != 0)
1631 {
1632 ggc_mark_tree (p->elab_list);
1633 mark_e_stack (&p->next);
1634 }
1635 }
1636
1637 /* Return nonzero if there are pending elaborations. */
1638
1639 int
1640 pending_elaborations_p ()
1641 {
1642 return TREE_CHAIN (pending_elaborations) != 0;
1643 }
1644
1645 /* Save a copy of the current pending elaboration list and make a new
1646 one. */
1647
1648 void
1649 push_pending_elaborations ()
1650 {
1651 struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));
1652
1653 p->next = elist_stack;
1654 p->elab_list = pending_elaborations;
1655 elist_stack = p;
1656 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1657 }
1658
1659 /* Pop the stack of pending elaborations. */
1660
1661 void
1662 pop_pending_elaborations ()
1663 {
1664 struct e_stack *p = elist_stack;
1665
1666 pending_elaborations = p->elab_list;
1667 elist_stack = p->next;
1668 free (p);
1669 }
1670
1671 /* Return the current position in pending_elaborations so we can insert
1672 elaborations after that point. */
1673
1674 tree
1675 get_elaboration_location ()
1676 {
1677 return tree_last (pending_elaborations);
1678 }
1679
1680 /* Insert the current elaborations after ELAB, which is in some elaboration
1681 list. */
1682
1683 void
1684 insert_elaboration_list (elab)
1685 tree elab;
1686 {
1687 tree next = TREE_CHAIN (elab);
1688
1689 if (TREE_CHAIN (pending_elaborations))
1690 {
1691 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1692 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1693 TREE_CHAIN (pending_elaborations) = 0;
1694 }
1695 }
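
/* Illustrative sketch, assuming a typical use of the elaboration routines
   above: an inner context would bracket its work with a push/pop pair and
   record any needed elaborations in between, for instance

     push_pending_elaborations ();
     add_pending_elaborations (some_var_decl, some_init_expr);
     if (pending_elaborations_p ())
       {
         tree list = get_pending_elaborations ();
         ... emit elaboration code for LIST here ...
       }
     pop_pending_elaborations ();

   where some_var_decl and some_init_expr are hypothetical trees, not names
   from this file. */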
1696
1697 /* Returns a LABEL_DECL node for LABEL_NAME. */
1698
1699 tree
1700 create_label_decl (label_name)
1701 tree label_name;
1702 {
1703 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1704
1705 DECL_CONTEXT (label_decl) = current_function_decl;
1706 DECL_MODE (label_decl) = VOIDmode;
1707 DECL_SOURCE_LINE (label_decl) = lineno;
1708 DECL_SOURCE_FILE (label_decl) = input_filename;
1709
1710 return label_decl;
1711 }
1712 \f
1713 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1714 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1715 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1716 PARM_DECL nodes chained through the TREE_CHAIN field).
1717
1718 INLINE_FLAG, PUBLIC_FLAG, and EXTERN_FLAG are used to set the appropriate
1719 fields in the FUNCTION_DECL. */
1720
1721 tree
1722 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1723 inline_flag, public_flag, extern_flag, attr_list)
1724 tree subprog_name;
1725 tree asm_name;
1726 tree subprog_type;
1727 tree param_decl_list;
1728 int inline_flag;
1729 int public_flag;
1730 int extern_flag;
1731 struct attrib *attr_list;
1732 {
1733 tree return_type = TREE_TYPE (subprog_type);
1734 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1735
1736 /* If this is a function nested inside an inlined external function, it
1737 means we aren't going to compile the outer function unless it is
1738 actually inlined, so do the same for us. */
1739 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1740 && DECL_EXTERNAL (current_function_decl))
1741 extern_flag = 1;
1742
1743 DECL_EXTERNAL (subprog_decl) = extern_flag;
1744 TREE_PUBLIC (subprog_decl) = public_flag;
1745 DECL_INLINE (subprog_decl) = inline_flag;
1746 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1747 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1748 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1749 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1750 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1751
1752 if (asm_name != 0)
1753 DECL_ASSEMBLER_NAME (subprog_decl) = asm_name;
1754
1755 process_attributes (subprog_decl, attr_list);
1756
1757 /* Add this decl to the current binding level. */
1758 subprog_decl = pushdecl (subprog_decl);
1759
1760 /* Output the assembler code and/or RTL for the declaration. */
1761 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1762
1763 return subprog_decl;
1764 }
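
/* Illustrative sketch, not part of the original source: declaring a
   parameterless public external procedure might look like

     tree ftype = build_function_type (void_type_node, NULL_TREE);
     tree fdecl = create_subprog_decl (get_identifier ("proc"), NULL_TREE,
                                       ftype, NULL_TREE, 0, 1, 1, NULL);

   i.e. not inlined, public, and extern, with no attributes; the name
   "proc" is made up for the example. */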
1765 \f
1766 /* Count how deep we are into nested functions. This is because
1767 we shouldn't call the backend function context routines unless we
1768 are in a nested function. */
1769
1770 static int function_nesting_depth;
1771
1772 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1773 body. This routine needs to be invoked before processing the declarations
1774 appearing in the subprogram. */
1775
1776 void
1777 begin_subprog_body (subprog_decl)
1778 tree subprog_decl;
1779 {
1780 tree param_decl_list;
1781 tree param_decl;
1782 tree next_param;
1783
1784 if (function_nesting_depth++ != 0)
1785 push_function_context ();
1786
1787 announce_function (subprog_decl);
1788
1789 /* Make this field nonzero so further routines know that this is not
1790 tentative. error_mark_node is replaced below (in poplevel) with the
1791 appropriate BLOCK. */
1792 DECL_INITIAL (subprog_decl) = error_mark_node;
1793
1794 /* This function exists in static storage. This does not mean `static' in
1795 the C sense! */
1796 TREE_STATIC (subprog_decl) = 1;
1797
1798 /* Enter a new binding level. */
1799 current_function_decl = subprog_decl;
1800 pushlevel (0);
1801
1802 /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
1803 subprogram body) so that they can be recognized as local variables in the
1804 subprogram.
1805
1806 The list of PARM_DECL nodes is stored in the right order in
1807 DECL_ARGUMENTS. Since ..._DECL nodes get stored in the reverse order in
1808 which they are transmitted to `pushdecl' we need to reverse the list of
1809 PARM_DECLs if we want it to be stored in the right order. The reason why
1810 we want to make sure the PARM_DECLs are stored in the correct order is
1811 that this list will be retrieved in a few lines with a call to `getdecls'
1812 to store it back into the DECL_ARGUMENTS field. */
1813 param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));
1814
1815 for (param_decl = param_decl_list; param_decl; param_decl = next_param)
1816 {
1817 next_param = TREE_CHAIN (param_decl);
1818 TREE_CHAIN (param_decl) = NULL;
1819 pushdecl (param_decl);
1820 }
1821
1822 /* Store back the PARM_DECL nodes. They appear in the right order. */
1823 DECL_ARGUMENTS (subprog_decl) = getdecls ();
1824
1825 init_function_start (subprog_decl, input_filename, lineno);
1826 expand_function_start (subprog_decl, 0);
1827 }
1828
1829
1830 /* Finish the definition of the current subprogram and compile it all the way
1831 to assembler language output. */
1832
1833 void
1834 end_subprog_body ()
1835 {
1836 tree decl;
1837 tree cico_list;
1838
1839 poplevel (1, 0, 1);
1840 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1841 = current_function_decl;
1842
1843 /* Mark the RESULT_DECL as being in this subprogram. */
1844 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1845
1846 expand_function_end (input_filename, lineno, 0);
1847 rest_of_compilation (current_function_decl);
1848
1849 #if 0
1850 /* If we're sure this function is defined in this file then mark it
1851 as such */
1852 if (TREE_ASM_WRITTEN (current_function_decl))
1853 mark_fn_defined_in_this_file (current_function_decl);
1854 #endif
1855
1856 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1857 not be seen when we call this function and will be in
1858 unallocated memory anyway. */
1859 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1860 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1861 TREE_VALUE (cico_list) = 0;
1862
1863 if (DECL_SAVED_INSNS (current_function_decl) == 0)
1864 {
1865 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1866 was saved for inline, in which case the DECL_RTLs are in
1867 preserved memory. */
1868 for (decl = DECL_ARGUMENTS (current_function_decl);
1869 decl != 0; decl = TREE_CHAIN (decl))
1870 {
1871 SET_DECL_RTL (decl, 0);
1872 DECL_INCOMING_RTL (decl) = 0;
1873 }
1874
1875 /* Similarly, discard DECL_RTL of the return value. */
1876 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1877
1878 /* But DECL_INITIAL must remain nonzero so we know this
1879 was an actual function definition unless toplev.c decided not
1880 to inline it. */
1881 if (DECL_INITIAL (current_function_decl) != 0)
1882 DECL_INITIAL (current_function_decl) = error_mark_node;
1883
1884 DECL_ARGUMENTS (current_function_decl) = 0;
1885 }
1886
1887 /* If we are not at the bottom of the function nesting stack, pop up to
1888 the containing function. Otherwise show we aren't in any function. */
1889 if (--function_nesting_depth != 0)
1890 pop_function_context ();
1891 else
1892 current_function_decl = 0;
1893 }
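
/* Illustrative sketch, assuming the usual pairing of the two routines
   above when translating a subprogram body:

     begin_subprog_body (fdecl);
     ... expand the declarations and statements of the body ...
     end_subprog_body ();

   where FDECL is a FUNCTION_DECL previously made by create_subprog_decl. */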
1894 \f
1895 /* Return a definition for a builtin function named NAME and whose data type
1896 is TYPE. TYPE should be a function type with argument types.
1897 FUNCTION_CODE tells later passes how to compile calls to this function.
1898 See tree.h for its possible values.
1899
1900 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1901 the name to be called if we can't opencode the function. */
1902
1903 tree
1904 builtin_function (name, type, function_code, class, library_name)
1905 const char *name;
1906 tree type;
1907 int function_code;
1908 enum built_in_class class;
1909 const char *library_name;
1910 {
1911 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1912
1913 DECL_EXTERNAL (decl) = 1;
1914 TREE_PUBLIC (decl) = 1;
1915 if (library_name)
1916 DECL_ASSEMBLER_NAME (decl) = get_identifier (library_name);
1917
1918 pushdecl (decl);
1919 DECL_BUILT_IN_CLASS (decl) = class;
1920 DECL_FUNCTION_CODE (decl) = function_code;
1921 return decl;
1922 }
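
/* Illustrative sketch, not from the original source: registering a builtin
   such as memcpy might look like

     tree ftype = build_function_type (ptr_type_node, NULL_TREE);
     builtin_function ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
                       BUILT_IN_NORMAL, "memcpy");

   the argument types are left unspecified here for brevity. */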
1923
1924 /* Return an integer type with the number of bits of precision given by
1925 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1926 it is a signed type. */
1927
1928 tree
1929 type_for_size (precision, unsignedp)
1930 unsigned precision;
1931 int unsignedp;
1932 {
1933 tree t;
1934 char type_name[20];
1935
1936 if (precision <= 2 * MAX_BITS_PER_WORD
1937 && signed_and_unsigned_types[precision][unsignedp] != 0)
1938 return signed_and_unsigned_types[precision][unsignedp];
1939
1940 if (unsignedp)
1941 t = make_unsigned_type (precision);
1942 else
1943 t = make_signed_type (precision);
1944
1945 if (precision <= 2 * MAX_BITS_PER_WORD)
1946 signed_and_unsigned_types[precision][unsignedp] = t;
1947
1948 if (TYPE_NAME (t) == 0)
1949 {
1950 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1951 TYPE_NAME (t) = get_identifier (type_name);
1952 }
1953
1954 return t;
1955 }
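
/* Illustrative sketch: because of the cache above, repeated requests for
   the same precision and signedness return the same node, e.g.

     tree u8  = type_for_size (8, 1);
     tree s32 = type_for_size (32, 0);

   yield types named "UNSIGNED_8" and "SIGNED_32" respectively, assuming
   both precisions fit in 2 * MAX_BITS_PER_WORD. */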
1956
1957 /* Likewise for floating-point types. */
1958
1959 static tree
1960 float_type_for_size (precision, mode)
1961 int precision;
1962 enum machine_mode mode;
1963 {
1964 tree t;
1965 char type_name[20];
1966
1967 if (float_types[(int) mode] != 0)
1968 return float_types[(int) mode];
1969
1970 float_types[(int) mode] = t = make_node (REAL_TYPE);
1971 TYPE_PRECISION (t) = precision;
1972 layout_type (t);
1973
1974 if (TYPE_MODE (t) != mode)
1975 gigi_abort (414);
1976
1977 if (TYPE_NAME (t) == 0)
1978 {
1979 sprintf (type_name, "FLOAT_%d", precision);
1980 TYPE_NAME (t) = get_identifier (type_name);
1981 }
1982
1983 return t;
1984 }
1985
1986 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1987 an unsigned type; otherwise a signed type is returned. */
1988
1989 tree
1990 type_for_mode (mode, unsignedp)
1991 enum machine_mode mode;
1992 int unsignedp;
1993 {
1994 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1995 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
1996 else
1997 return type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1998 }
1999
2000 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2001
2002 tree
2003 unsigned_type (type_node)
2004 tree type_node;
2005 {
2006 tree type = type_for_size (TYPE_PRECISION (type_node), 1);
2007
2008 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2009 {
2010 type = copy_node (type);
2011 TREE_TYPE (type) = type_node;
2012 }
2013 else if (TREE_TYPE (type_node) != 0
2014 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2015 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2016 {
2017 type = copy_node (type);
2018 TREE_TYPE (type) = TREE_TYPE (type_node);
2019 }
2020
2021 return type;
2022 }
2023
2024 /* Return the signed version of a TYPE_NODE, a scalar type. */
2025
2026 tree
2027 signed_type (type_node)
2028 tree type_node;
2029 {
2030 tree type = type_for_size (TYPE_PRECISION (type_node), 0);
2031
2032 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2033 {
2034 type = copy_node (type);
2035 TREE_TYPE (type) = type_node;
2036 }
2037 else if (TREE_TYPE (type_node) != 0
2038 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2039 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2040 {
2041 type = copy_node (type);
2042 TREE_TYPE (type) = TREE_TYPE (type_node);
2043 }
2044
2045 return type;
2046 }
2047
2048 /* Return a type the same as TYPE except unsigned or signed according to
2049 UNSIGNEDP. */
2050
2051 tree
2052 signed_or_unsigned_type (unsignedp, type)
2053 int unsignedp;
2054 tree type;
2055 {
2056 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2057 return type;
2058 else
2059 return type_for_size (TYPE_PRECISION (type), unsignedp);
2060 }
2061 \f
2062 /* EXP is an expression for the size of an object. If this size contains
2063 discriminant references, replace them with the maximum (if MAX_P) or
2064 minimum (if ! MAX_P) possible value of the discriminant. */
2065
2066 tree
2067 max_size (exp, max_p)
2068 tree exp;
2069 int max_p;
2070 {
2071 enum tree_code code = TREE_CODE (exp);
2072 tree type = TREE_TYPE (exp);
2073
2074 switch (TREE_CODE_CLASS (code))
2075 {
2076 case 'd':
2077 case 'c':
2078 return exp;
2079
2080 case 'x':
2081 if (code == TREE_LIST)
2082 return tree_cons (TREE_PURPOSE (exp),
2083 max_size (TREE_VALUE (exp), max_p),
2084 TREE_CHAIN (exp) != 0
2085 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2086 break;
2087
2088 case 'r':
2089 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2090 modify. Otherwise, we abort since it is something we can't
2091 handle. */
2092 if (! contains_placeholder_p (exp))
2093 gigi_abort (406);
2094
2095 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2096 return
2097 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2098
2099 case '<':
2100 return max_p ? size_one_node : size_zero_node;
2101
2102 case '1':
2103 case '2':
2104 case 'e':
2105 switch (TREE_CODE_LENGTH (code))
2106 {
2107 case 1:
2108 if (code == NON_LVALUE_EXPR)
2109 return max_size (TREE_OPERAND (exp, 0), max_p);
2110 else
2111 return
2112 fold (build1 (code, type,
2113 max_size (TREE_OPERAND (exp, 0),
2114 code == NEGATE_EXPR ? ! max_p : max_p)));
2115
2116 case 2:
2117 if (code == RTL_EXPR)
2118 gigi_abort (407);
2119 else if (code == COMPOUND_EXPR)
2120 return max_size (TREE_OPERAND (exp, 1), max_p);
2121 else if (code == WITH_RECORD_EXPR)
2122 return exp;
2123
2124 {
2125 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2126 tree rhs = max_size (TREE_OPERAND (exp, 1),
2127 code == MINUS_EXPR ? ! max_p : max_p);
2128
2129 /* Special-case wanting the maximum value of a MIN_EXPR.
2130 In that case, if one side overflows, return the other.
2131 sizetype is signed, but we know sizes are non-negative.
2132 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2133 overflowing or the maximum possible value and the RHS
2134 a variable. */
2135 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2136 return lhs;
2137 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2138 return rhs;
2139 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2140 && (TREE_OVERFLOW (lhs)
2141 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2142 && ! TREE_CONSTANT (rhs))
2143 return lhs;
2144 else
2145 return fold (build (code, type, lhs, rhs));
2146 }
2147
2148 case 3:
2149 if (code == SAVE_EXPR)
2150 return exp;
2151 else if (code == COND_EXPR)
2152 return fold (build (MAX_EXPR, type,
2153 max_size (TREE_OPERAND (exp, 1), max_p),
2154 max_size (TREE_OPERAND (exp, 2), max_p)));
2155 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2156 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2157 max_size (TREE_OPERAND (exp, 1), max_p));
2158 }
2159 }
2160
2161 gigi_abort (408);
2162 }
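
/* Illustrative example, with a made-up size expression: for

     MIN_EXPR <D * 8, 800>

   where D is a discriminant reference ranging over 1 .. 100, max_size with
   MAX_P nonzero substitutes the upper bound 100 and folds to 800, while
   max_size with MAX_P zero substitutes the lower bound 1 and folds to 8. */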
2163 \f
2164 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2165 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2166 Return a constructor for the template. */
2167
2168 tree
2169 build_template (template_type, array_type, expr)
2170 tree template_type;
2171 tree array_type;
2172 tree expr;
2173 {
2174 tree template_elts = NULL_TREE;
2175 tree bound_list = NULL_TREE;
2176 tree field;
2177
2178 if (TREE_CODE (array_type) == RECORD_TYPE
2179 && (TYPE_IS_PADDING_P (array_type)
2180 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2181 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2182
2183 if (TREE_CODE (array_type) == ARRAY_TYPE
2184 || (TREE_CODE (array_type) == INTEGER_TYPE
2185 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2186 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2187
2188 /* First make the list for a CONSTRUCTOR for the template. Go down the
2189 field list of the template instead of the type chain because this
2190 array might be an Ada array of arrays and we can't tell where the
2191 nested arrays stop being the underlying object. */
2192
2193 for (field = TYPE_FIELDS (template_type); field;
2194 (bound_list != 0
2195 ? (bound_list = TREE_CHAIN (bound_list))
2196 : (array_type = TREE_TYPE (array_type))),
2197 field = TREE_CHAIN (TREE_CHAIN (field)))
2198 {
2199 tree bounds, min, max;
2200
2201 /* If we have a bound list, get the bounds from there. Likewise
2202 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2203 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2204 This will give us a maximum range. */
2205 if (bound_list != 0)
2206 bounds = TREE_VALUE (bound_list);
2207 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2208 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2209 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2210 && DECL_BY_COMPONENT_PTR_P (expr))
2211 bounds = TREE_TYPE (field);
2212 else
2213 gigi_abort (411);
2214
2215 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2216 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2217
2218 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2219 surround them with a WITH_RECORD_EXPR giving EXPR as the
2220 OBJECT. */
2221 if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
2222 min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
2223 if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
2224 max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);
2225
2226 template_elts = tree_cons (TREE_CHAIN (field), max,
2227 tree_cons (field, min, template_elts));
2228 }
2229
2230 return build_constructor (template_type, nreverse (template_elts));
2231 }
2232 \f
2233 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2234 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2235 in the type contains in its DECL_INITIAL the expression to use when
2236 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2237 to print out an error message if the mechanism cannot be applied to
2238 an object of that type; it is also used for the name of the descriptor type. */
2239
2240 tree
2241 build_vms_descriptor (type, mech, gnat_entity)
2242 tree type;
2243 Mechanism_Type mech;
2244 Entity_Id gnat_entity;
2245 {
2246 tree record_type = make_node (RECORD_TYPE);
2247 tree field_list = 0;
2248 int class;
2249 int dtype = 0;
2250 tree inner_type;
2251 int ndim;
2252 int i;
2253 tree *idx_arr;
2254 tree tem;
2255
2256 /* If TYPE is an unconstrained array, use the underlying array type. */
2257 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2258 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2259
2260 /* If this is an array, compute the number of dimensions in the array,
2261 get the index types, and point to the inner type. */
2262 if (TREE_CODE (type) != ARRAY_TYPE)
2263 ndim = 0;
2264 else
2265 for (ndim = 1, inner_type = type;
2266 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2267 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2268 ndim++, inner_type = TREE_TYPE (inner_type))
2269 ;
2270
2271 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2272
2273 if (mech != By_Descriptor_NCA
2274 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2275 for (i = ndim - 1, inner_type = type;
2276 i >= 0;
2277 i--, inner_type = TREE_TYPE (inner_type))
2278 idx_arr[i] = TYPE_DOMAIN (inner_type);
2279 else
2280 for (i = 0, inner_type = type;
2281 i < ndim;
2282 i++, inner_type = TREE_TYPE (inner_type))
2283 idx_arr[i] = TYPE_DOMAIN (inner_type);
2284
2285 /* Now get the DTYPE value. */
2286 switch (TREE_CODE (type))
2287 {
2288 case INTEGER_TYPE:
2289 case ENUMERAL_TYPE:
2290 if (TYPE_VAX_FLOATING_POINT_P (type))
2291 switch ((int) TYPE_DIGITS_VALUE (type))
2292 {
2293 case 6:
2294 dtype = 10;
2295 break;
2296 case 9:
2297 dtype = 11;
2298 break;
2299 case 15:
2300 dtype = 27;
2301 break;
2302 }
2303 else
2304 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2305 {
2306 case 8:
2307 dtype = TREE_UNSIGNED (type) ? 2 : 6;
2308 break;
2309 case 16:
2310 dtype = TREE_UNSIGNED (type) ? 3 : 7;
2311 break;
2312 case 32:
2313 dtype = TREE_UNSIGNED (type) ? 4 : 8;
2314 break;
2315 case 64:
2316 dtype = TREE_UNSIGNED (type) ? 5 : 9;
2317 break;
2318 case 128:
2319 dtype = TREE_UNSIGNED (type) ? 25 : 26;
2320 break;
2321 }
2322 break;
2323
2324 case REAL_TYPE:
2325 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2326 break;
2327
2328 case COMPLEX_TYPE:
2329 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2330 && TYPE_VAX_FLOATING_POINT_P (type))
2331 switch ((int) TYPE_DIGITS_VALUE (type))
2332 {
2333 case 6:
2334 dtype = 12;
2335 break;
2336 case 9:
2337 dtype = 13;
2338 break;
2339 case 15:
2340 dtype = 29;
2341 }
2342 else
2343 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2344 break;
2345
2346 case ARRAY_TYPE:
2347 dtype = 14;
2348 break;
2349
2350 default:
2351 break;
2352 }
2353
2354 /* Get the CLASS value. */
2355 switch (mech)
2356 {
2357 case By_Descriptor_A:
2358 class = 4;
2359 break;
2360 case By_Descriptor_NCA:
2361 class = 10;
2362 break;
2363 case By_Descriptor_SB:
2364 class = 15;
2365 break;
2366 default:
2367 class = 1;
2368 }
2369
2370 /* Make the type for a descriptor for VMS. The first four fields
2371 are the same for all types. */
2372
2373 field_list
2374 = chainon (field_list,
2375 make_descriptor_field
2376 ("LENGTH", type_for_size (16, 1), record_type,
2377 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2378
2379 field_list = chainon (field_list,
2380 make_descriptor_field ("DTYPE", type_for_size (8, 1),
2381 record_type, size_int (dtype)));
2382 field_list = chainon (field_list,
2383 make_descriptor_field ("CLASS", type_for_size (8, 1),
2384 record_type, size_int (class)));
2385
2386 field_list
2387 = chainon (field_list,
2388 make_descriptor_field ("POINTER",
2389 build_pointer_type (type),
2390 record_type,
2391 build1 (ADDR_EXPR,
2392 build_pointer_type (type),
2393 build (PLACEHOLDER_EXPR,
2394 type))));
2395
2396 switch (mech)
2397 {
2398 case By_Descriptor:
2399 case By_Descriptor_S:
2400 break;
2401
2402 case By_Descriptor_SB:
2403 field_list
2404 = chainon (field_list,
2405 make_descriptor_field
2406 ("SB_L1", type_for_size (32, 1), record_type,
2407 TREE_CODE (type) == ARRAY_TYPE
2408 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2409 field_list
2410 = chainon (field_list,
2411 make_descriptor_field
2412 ("SB_L2", type_for_size (32, 1), record_type,
2413 TREE_CODE (type) == ARRAY_TYPE
2414 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2415 break;
2416
2417 case By_Descriptor_A:
2418 case By_Descriptor_NCA:
2419 field_list = chainon (field_list,
2420 make_descriptor_field ("SCALE",
2421 type_for_size (8, 1),
2422 record_type,
2423 size_zero_node));
2424
2425 field_list = chainon (field_list,
2426 make_descriptor_field ("DIGITS",
2427 type_for_size (8, 1),
2428 record_type,
2429 size_zero_node));
2430
2431 field_list
2432 = chainon (field_list,
2433 make_descriptor_field
2434 ("AFLAGS", type_for_size (8, 1), record_type,
2435 size_int (mech == By_Descriptor_NCA
2436 ? 0
2437 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2438 : (TREE_CODE (type) == ARRAY_TYPE
2439 && TYPE_CONVENTION_FORTRAN_P (type)
2440 ? 224 : 192))));
2441
2442 field_list = chainon (field_list,
2443 make_descriptor_field ("DIMCT",
2444 type_for_size (8, 1),
2445 record_type,
2446 size_int (ndim)));
2447
2448 field_list = chainon (field_list,
2449 make_descriptor_field ("ARSIZE",
2450 type_for_size (32, 1),
2451 record_type,
2452 size_in_bytes (type)));
2453
2454 /* Now build a pointer to the 0,0,0... element. */
2455 tem = build (PLACEHOLDER_EXPR, type);
2456 for (i = 0, inner_type = type; i < ndim;
2457 i++, inner_type = TREE_TYPE (inner_type))
2458 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2459 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2460
2461 field_list
2462 = chainon (field_list,
2463 make_descriptor_field
2464 ("A0", build_pointer_type (inner_type), record_type,
2465 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2466
2467 /* Next come the addressing coefficients. */
2468 tem = size_int (1);
2469 for (i = 0; i < ndim; i++)
2470 {
2471 char fname[3];
2472 tree idx_length
2473 = size_binop (MULT_EXPR, tem,
2474 size_binop (PLUS_EXPR,
2475 size_binop (MINUS_EXPR,
2476 TYPE_MAX_VALUE (idx_arr[i]),
2477 TYPE_MIN_VALUE (idx_arr[i])),
2478 size_int (1)));
2479
2480 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2481 fname[1] = '0' + i, fname[2] = 0;
2482 field_list = chainon (field_list,
2483 make_descriptor_field (fname,
2484 type_for_size (32, 1),
2485 record_type,
2486 idx_length));
2487
2488 if (mech == By_Descriptor_NCA)
2489 tem = idx_length;
2490 }
2491
2492 /* Finally here are the bounds. */
2493 for (i = 0; i < ndim; i++)
2494 {
2495 char fname[3];
2496
2497 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2498 field_list
2499 = chainon (field_list,
2500 make_descriptor_field
2501 (fname, type_for_size (32, 1), record_type,
2502 TYPE_MIN_VALUE (idx_arr[i])));
2503
2504 fname[0] = 'U';
2505 field_list
2506 = chainon (field_list,
2507 make_descriptor_field
2508 (fname, type_for_size (32, 1), record_type,
2509 TYPE_MAX_VALUE (idx_arr[i])));
2510 }
2511 break;
2512
2513 default:
2514 post_error ("unsupported descriptor type for &", gnat_entity);
2515 }
2516
2517 finish_record_type (record_type, field_list, 0, 1);
2518 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2519 record_type));
2520
2521 return record_type;
2522 }
2523
2524 /* Utility routine for above code to make a field. */
2525
2526 static tree
2527 make_descriptor_field (name, type, rec_type, initial)
2528 const char *name;
2529 tree type;
2530 tree rec_type;
2531 tree initial;
2532 {
2533 tree field
2534 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2535
2536 DECL_INITIAL (field) = initial;
2537 return field;
2538 }
2539 \f
2540 /* Build a type to be used to represent an aliased object whose nominal
2541 type is an unconstrained array. This consists of a RECORD_TYPE containing
2542 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2543 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2544 is used to represent an arbitrary unconstrained object. Use NAME
2545 as the name of the record. */
2546
2547 tree
2548 build_unc_object_type (template_type, object_type, name)
2549 tree template_type;
2550 tree object_type;
2551 tree name;
2552 {
2553 tree type = make_node (RECORD_TYPE);
2554 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2555 template_type, type, 0, 0, 0, 1);
2556 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2557 type, 0, 0, 0, 1);
2558
2559 TYPE_NAME (type) = name;
2560 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2561 finish_record_type (type,
2562 chainon (chainon (NULL_TREE, template_field),
2563 array_field),
2564 0, 0);
2565
2566 return type;
2567 }
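
/* Illustrative sketch of the layout produced above: for an aliased object
   of a one-dimensional unconstrained array type, the record looks like

     record
       BOUNDS : <the template, holding the low and high bounds>;
       ARRAY  : <the array data itself>;
     end record

   so the bounds are stored next to the data. */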
2568 \f
2569 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2570 the normal case this is just two adjustments, but we have more to do
2571 if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
2572
2573 void
2574 update_pointer_to (old_type, new_type)
2575 tree old_type;
2576 tree new_type;
2577 {
2578 tree ptr = TYPE_POINTER_TO (old_type);
2579 tree ref = TYPE_REFERENCE_TO (old_type);
2580 tree type;
2581
2582 /* If this is the main variant, process all the other variants first. */
2583 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2584 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2585 type = TYPE_NEXT_VARIANT (type))
2586 update_pointer_to (type, new_type);
2587
2588 /* If no pointer or reference, we are done. Otherwise, get the new type with
2589 the same qualifiers as the old type and see if it is the same as the old
2590 type. */
2591 if (ptr == 0 && ref == 0)
2592 return;
2593
2594 new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
2595 if (old_type == new_type)
2596 return;
2597
2598 /* First handle the simple case. */
2599 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2600 {
2601 if (ptr != 0)
2602 TREE_TYPE (ptr) = new_type;
2603 TYPE_POINTER_TO (new_type) = ptr;
2604
2605 if (ref != 0)
2606 TREE_TYPE (ref) = new_type;
2607 TYPE_REFERENCE_TO (new_type) = ref;
2608
2609 if (ptr != 0 && TYPE_NAME (ptr) != 0
2610 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2611 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2612 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2613 global_bindings_p (), 0);
2614 if (ref != 0 && TYPE_NAME (ref) != 0
2615 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2616 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2617 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2618 global_bindings_p (), 0);
2619 }
2620
2621 /* Now deal with the unconstrained array case. In this case the "pointer"
2622 is actually a RECORD_TYPE where the types of both fields are
2623 pointers to void. In that case, copy the field list from the
2624 old type to the new one and update the fields' context. */
2625 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2626 gigi_abort (412);
2627
2628 else
2629 {
2630 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2631 tree ptr_temp_type;
2632 tree new_ref;
2633 tree var;
2634
2635 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2636 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2637 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2638
2639 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2640 template bounds.
2641
2642 ??? This is now the only use of gnat_substitute_in_type, which
2643 is now a very "heavy" routine to do this, so it should be replaced
2644 at some point. */
2645 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2646 new_ref = build (COMPONENT_REF, ptr_temp_type,
2647 build (PLACEHOLDER_EXPR, ptr),
2648 TREE_CHAIN (TYPE_FIELDS (ptr)));
2649
2650 update_pointer_to
2651 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2652 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2653 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2654
2655 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2656 TYPE_UNCONSTRAINED_ARRAY (var) = new_type;
2657
2658 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2659 = TREE_TYPE (new_type) = ptr;
2660
2661 /* Now handle updating the allocation record, which is what the thin pointer
2662 points to. Update all pointers from the old record into the new
2663 one, update the types of the fields, and recompute the size. */
2664
2665 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2666
2667 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2668 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2669 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2670 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2671 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2672 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2673 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2674
2675 TYPE_SIZE (new_obj_rec)
2676 = size_binop (PLUS_EXPR,
2677 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2678 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2679 TYPE_SIZE_UNIT (new_obj_rec)
2680 = size_binop (PLUS_EXPR,
2681 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2682 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2683 rest_of_type_compilation (ptr, global_bindings_p ());
2684 }
2685 }
2686 \f
2687 /* Convert a pointer to a constrained array into a pointer to a fat
2688 pointer. This involves making or finding a template. */
2689
2690 static tree
2691 convert_to_fat_pointer (type, expr)
2692 tree type;
2693 tree expr;
2694 {
2695 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2696 tree template, template_addr;
2697 tree etype = TREE_TYPE (expr);
2698
2699 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2700 pointer to the template and array. */
2701 if (integer_zerop (expr))
2702 return
2703 build_constructor
2704 (type,
2705 tree_cons (TYPE_FIELDS (type),
2706 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2707 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2708 convert (build_pointer_type (template_type),
2709 expr),
2710 NULL_TREE)));
2711
2712 /* If EXPR is a thin pointer, make the template and data from the record. */
2713
2714 else if (TYPE_THIN_POINTER_P (etype))
2715 {
2716 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2717
2718 expr = save_expr (expr);
2719 if (TREE_CODE (expr) == ADDR_EXPR)
2720 expr = TREE_OPERAND (expr, 0);
2721 else
2722 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2723
2724 template = build_component_ref (expr, NULL_TREE, fields);
2725 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2726 build_component_ref (expr, NULL_TREE,
2727 TREE_CHAIN (fields)));
2728 }
2729 else
2730 /* Otherwise, build the constructor for the template. */
2731 template = build_template (template_type, TREE_TYPE (etype), expr);
2732
2733 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2734
2735 /* The result is a CONSTRUCTOR for the fat pointer. */
2736 return
2737 build_constructor (type,
2738 tree_cons (TYPE_FIELDS (type), expr,
2739 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2740 template_addr, NULL_TREE)));
2741 }
2742 \f
2743 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2744 is something that is a fat pointer, so convert to it first if EXPR
2745 is not already a fat pointer. */
2746
2747 static tree
2748 convert_to_thin_pointer (type, expr)
2749 tree type;
2750 tree expr;
2751 {
2752 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2753 expr
2754 = convert_to_fat_pointer
2755 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2756
2757 /* We get the pointer to the data and use a NOP_EXPR to make it the
2758 proper GCC type. */
2759 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2760 expr = build1 (NOP_EXPR, type, expr);
2761
2762 return expr;
2763 }
2764 \f
2765 /* Create an expression whose value is that of EXPR,
2766 converted to type TYPE. The TREE_TYPE of the value
2767 is always TYPE. This function implements all reasonable
2768 conversions; callers should filter out those that are
2769 not permitted by the language being compiled. */
2770
2771 tree
2772 convert (type, expr)
2773 tree type, expr;
2774 {
2775 enum tree_code code = TREE_CODE (type);
2776 tree etype = TREE_TYPE (expr);
2777 enum tree_code ecode = TREE_CODE (etype);
2778 tree tem;
2779
2780 /* If EXPR is already the right type, we are done. */
2781 if (type == etype)
2782 return expr;
2783
2784 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2785 new one. */
2786 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
2787 return build (WITH_RECORD_EXPR, type,
2788 convert (type, TREE_OPERAND (expr, 0)),
2789 TREE_OPERAND (expr, 1));
2790
2791 /* If the input type has padding, remove it by doing a component reference
2792 to the field. If the output type has padding, make a constructor
2793 to build the record. If both input and output have padding and are
2794 of variable size, do this as an unchecked conversion. */
2795 if (ecode == RECORD_TYPE && code == RECORD_TYPE
2796 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2797 && (! TREE_CONSTANT (TYPE_SIZE (type))
2798 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2799 ;
2800 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2801 {
2802 /* If we have just converted to this padded type, just get
2803 the inner expression. */
2804 if (TREE_CODE (expr) == CONSTRUCTOR
2805 && CONSTRUCTOR_ELTS (expr) != 0
2806 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2807 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2808 else
2809 return convert (type, build_component_ref (expr, NULL_TREE,
2810 TYPE_FIELDS (etype)));
2811 }
2812 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2813 {
2814 /* If we previously converted from another type and our type is
2815 of variable size, remove the conversion to avoid the need for
2816 variable-size temporaries. */
2817 if (TREE_CODE (expr) == UNCHECKED_CONVERT_EXPR
2818 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2819 expr = TREE_OPERAND (expr, 0);
2820
2821 /* If we are just removing the padding from expr, convert the original
2822 object if we have variable size. That will avoid the need
2823 for some variable-size temporaries. */
2824 if (TREE_CODE (expr) == COMPONENT_REF
2825 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2826 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2827 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2828 return convert (type, TREE_OPERAND (expr, 0));
2829
2830 /* If the result type is a padded type with a self-referentially-sized
2831 field and the expression type is a record, do this as an
2832 unchecked conversion. */
2833 else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
2834 && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
2835 && TREE_CODE (etype) == RECORD_TYPE)
2836 return unchecked_convert (type, expr);
2837
2838 else
2839 return
2840 build_constructor (type,
2841 tree_cons (TYPE_FIELDS (type),
2842 convert (TREE_TYPE
2843 (TYPE_FIELDS (type)),
2844 expr),
2845 NULL_TREE));
2846 }
2847
2848 /* If the input is a biased type, adjust first. */
2849 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2850 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2851 fold (build1 (GNAT_NOP_EXPR,
2852 TREE_TYPE (etype), expr)),
2853 TYPE_MIN_VALUE (etype))));
2854
2855 /* If the input is a left-justified modular type, we need to extract
2856 the actual object before converting it to any other type with the
2857 exception of an unconstrained array. */
2858 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2859 && code != UNCONSTRAINED_ARRAY_TYPE)
2860 return convert (type, build_component_ref (expr, NULL_TREE,
2861 TYPE_FIELDS (etype)));
2862
2863 /* If converting a type that does not contain a template into one
2864 that does, convert to the data type and then build the template. */
2865 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
2866 && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
2867 {
2868 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2869
2870 return
2871 build_constructor
2872 (type,
2873 tree_cons (TYPE_FIELDS (type),
2874 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2875 obj_type, NULL_TREE),
2876 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2877 convert (obj_type, expr), NULL_TREE)));
2878 }
2879
2880 /* There are some special cases of expressions that we process
2881 specially. */
2882 switch (TREE_CODE (expr))
2883 {
2884 case ERROR_MARK:
2885 return expr;
2886
2887 case TRANSFORM_EXPR:
2888 case NULL_EXPR:
2889 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2890 conversion in gnat_expand_expr. NULL_EXPR does not represent
2891 an actual value, so no conversion is needed. */
2892 TREE_TYPE (expr) = type;
2893 return expr;
2894
2895 case STRING_CST:
2896 case CONSTRUCTOR:
2897 /* If we are converting a STRING_CST to another constrained array type,
2898 just make a new one in the proper type. Likewise for a
2899 CONSTRUCTOR. But if the mode of the type is different, we must
2900 ensure a new RTL is made for the constant. */
2901 if (code == ecode && AGGREGATE_TYPE_P (etype)
2902 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2903 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
2904 {
2905 expr = copy_node (expr);
2906 TREE_TYPE (expr) = type;
2907
2908 if (TYPE_MODE (type) != TYPE_MODE (etype))
2909 TREE_CST_RTL (expr) = 0;
2910
2911 return expr;
2912 }
2913 break;
2914
2915 case COMPONENT_REF:
2916 /* If we are converting between two aggregate types of the same
2917 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2918 This avoids unneeded conversions, which make reference computations
2919 more complex. */
2920 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2921 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2922 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2923 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
2924 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2925 TREE_OPERAND (expr, 1));
2926
2927 break;
2928
2929 case UNCONSTRAINED_ARRAY_REF:
2930 /* Convert this to the type of the inner array by getting the address of
2931 the array from the template. */
2932 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2933 build_component_ref (TREE_OPERAND (expr, 0),
2934 get_identifier ("P_ARRAY"),
2935 NULL_TREE));
2936 etype = TREE_TYPE (expr);
2937 ecode = TREE_CODE (etype);
2938 break;
2939
2940 case UNCHECKED_CONVERT_EXPR:
2941 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2942 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2943 return convert (type, TREE_OPERAND (expr, 0));
2944 break;
2945
2946 case INDIRECT_REF:
2947 /* If both types are record types, just convert the pointer and
2948 make a new INDIRECT_REF.
2949
2950 ??? Disable this for now since it causes problems with the
2951 code in build_binary_op for MODIFY_EXPR which wants to
2952 strip off conversions. But that code really is a mess and
2953 we need to do this a much better way some time. */
2954 if (0
2955 && (TREE_CODE (type) == RECORD_TYPE
2956 || TREE_CODE (type) == UNION_TYPE)
2957 && (TREE_CODE (etype) == RECORD_TYPE
2958 || TREE_CODE (etype) == UNION_TYPE)
2959 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2960 return build_unary_op (INDIRECT_REF, NULL_TREE,
2961 convert (build_pointer_type (type),
2962 TREE_OPERAND (expr, 0)));
2963 break;
2964
2965 default:
2966 break;
2967 }
2968
2969 /* Check for converting to a pointer to an unconstrained array. */
2970 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2971 return convert_to_fat_pointer (type, expr);
2972
2973 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2974 || (code == INTEGER_CST && ecode == INTEGER_CST
2975 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2976 return fold (build1 (NOP_EXPR, type, expr));
2977
2978 switch (code)
2979 {
2980 case VOID_TYPE:
2981 return build1 (CONVERT_EXPR, type, expr);
2982
2983 case INTEGER_TYPE:
2984 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2985 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
2986 return unchecked_convert (type, expr);
2987 else if (TYPE_BIASED_REPRESENTATION_P (type))
2988 return fold (build1 (CONVERT_EXPR, type,
2989 fold (build (MINUS_EXPR, TREE_TYPE (type),
2990 convert (TREE_TYPE (type), expr),
2991 TYPE_MIN_VALUE (type)))));
2992
2993 /* ... fall through ... */
2994
2995 case ENUMERAL_TYPE:
2996 return fold (convert_to_integer (type, expr));
2997
2998 case POINTER_TYPE:
2999 case REFERENCE_TYPE:
3000 /* If converting between two pointers to records denoting
3001 both a template and type, adjust if needed to account
3002 for any differing offsets, since one might be negative. */
3003 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3004 {
3005 tree bit_diff
3006 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3007 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3008 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3009 sbitsize_int (BITS_PER_UNIT));
3010
3011 expr = build1 (NOP_EXPR, type, expr);
3012 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3013 if (integer_zerop (byte_diff))
3014 return expr;
3015
3016 return build_binary_op (PLUS_EXPR, type, expr,
3017 fold (convert_to_pointer (type, byte_diff)));
3018 }
3019
3020 /* If converting to a thin pointer, handle specially. */
3021 if (TYPE_THIN_POINTER_P (type)
3022 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3023 return convert_to_thin_pointer (type, expr);
3024
3025 /* If converting fat pointer to normal pointer, get the pointer to the
3026 array and then convert it. */
3027 else if (TYPE_FAT_POINTER_P (etype))
3028 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3029 NULL_TREE);
3030
3031 return fold (convert_to_pointer (type, expr));
3032
3033 case REAL_TYPE:
3034 return fold (convert_to_real (type, expr));
3035
3036 case RECORD_TYPE:
3037 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3038 return
3039 build_constructor
3040 (type, tree_cons (TYPE_FIELDS (type),
3041 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3042 NULL_TREE));
3043
3044 /* ... fall through ... */
3045
3046 case ARRAY_TYPE:
3047 /* In these cases, assume the front-end has validated the conversion.
3048 If the conversion is valid, it will be a bit-wise conversion, so
3049 it can be viewed as an unchecked conversion. */
3050 return unchecked_convert (type, expr);
3051
3052 case UNION_TYPE:
3053 /* Just validate that ETYPE is indeed the type of a field
3054 of TYPE. Then make the simple conversion. */
3055 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3056 if (TREE_TYPE (tem) == etype)
3057 return build1 (CONVERT_EXPR, type, expr);
3058
3059 gigi_abort (413);
3060
3061 case UNCONSTRAINED_ARRAY_TYPE:
3062 /* If EXPR is a constrained array, take its address, convert it to a
3063 fat pointer, and then dereference it. Likewise if EXPR is a
3064 record containing both a template and a constrained array.
3065 Note that a record representing a left-justified modular type
3066 always represents a packed constrained array. */
3067 if (ecode == ARRAY_TYPE
3068 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3069 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3070 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3071 return
3072 build_unary_op
3073 (INDIRECT_REF, NULL_TREE,
3074 convert_to_fat_pointer (TREE_TYPE (type),
3075 build_unary_op (ADDR_EXPR,
3076 NULL_TREE, expr)));
3077
3078 /* Do something very similar for converting one unconstrained
3079 array to another. */
3080 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3081 return
3082 build_unary_op (INDIRECT_REF, NULL_TREE,
3083 convert (TREE_TYPE (type),
3084 build_unary_op (ADDR_EXPR,
3085 NULL_TREE, expr)));
3086 else
3087 gigi_abort (409);
3088
3089 case COMPLEX_TYPE:
3090 return fold (convert_to_complex (type, expr));
3091
3092 default:
3093 gigi_abort (410);
3094 }
3095 }
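
/* Illustrative sketch, not from the original source: convert is the general
   entry point used to change the type of an expression, for instance

     tree widened = convert (type_for_size (32, 0), expr);

   would widen a hypothetical integer EXPR to a 32-bit signed type via the
   INTEGER_TYPE case above. */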
3096 \f
3097 /* Remove all conversions that are done in EXP. This includes converting
3098 from a padded type or converting to a left-justified modular type. */
3099
3100 tree
3101 remove_conversions (exp)
3102 tree exp;
3103 {
3104 switch (TREE_CODE (exp))
3105 {
3106 case CONSTRUCTOR:
3107 if (TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3108 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3109 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)));
3110 break;
3111
3112 case COMPONENT_REF:
3113 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3114 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3115 return remove_conversions (TREE_OPERAND (exp, 0));
3116 break;
3117
3118 case UNCHECKED_CONVERT_EXPR:
3119 case NOP_EXPR: case CONVERT_EXPR:
3120 return remove_conversions (TREE_OPERAND (exp, 0));
3121
3122 default:
3123 break;
3124 }
3125
3126 return exp;
3127 }
3128 \f
3129 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3130 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3131 likewise return an expression pointing to the underlying array. */
3132
3133 tree
3134 maybe_unconstrained_array (exp)
3135 tree exp;
3136 {
3137 enum tree_code code = TREE_CODE (exp);
3138 tree new;
3139
3140 switch (TREE_CODE (TREE_TYPE (exp)))
3141 {
3142 case UNCONSTRAINED_ARRAY_TYPE:
3143 if (code == UNCONSTRAINED_ARRAY_REF)
3144 {
3145 new
3146 = build_unary_op (INDIRECT_REF, NULL_TREE,
3147 build_component_ref (TREE_OPERAND (exp, 0),
3148 get_identifier ("P_ARRAY"),
3149 NULL_TREE));
3150 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3151 return new;
3152 }
3153
3154 else if (code == NULL_EXPR)
3155 return build1 (NULL_EXPR,
3156 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3157 (TREE_TYPE (TREE_TYPE (exp))))),
3158 TREE_OPERAND (exp, 0));
3159
3160 else if (code == WITH_RECORD_EXPR
3161 && (TREE_OPERAND (exp, 0)
3162 != (new = maybe_unconstrained_array
3163 (TREE_OPERAND (exp, 0)))))
3164 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3165 TREE_OPERAND (exp, 1));
3166
3167 case RECORD_TYPE:
3168 if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3169 {
3170 new
3171 = build_component_ref (exp, NULL_TREE,
3172 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
3173 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3174 && TYPE_IS_PADDING_P (TREE_TYPE (new)))
3175 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);
3176
3177 return new;
3178 }
3179 break;
3180
3181 default:
3182 break;
3183 }
3184
3185 return exp;
3186 }
3187 \f
3188 /* Return an expression that does an unchecked conversion of EXPR to TYPE. */
3189
3190 tree
3191 unchecked_convert (type, expr)
3192 tree type;
3193 tree expr;
3194 {
3195 tree etype = TREE_TYPE (expr);
3196
3197 /* If the expression is already the right type, we are done. */
3198 if (etype == type)
3199 return expr;
3200
3201 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3202 new one. */
3203 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3204 return build (WITH_RECORD_EXPR, type,
3205 unchecked_convert (type, TREE_OPERAND (expr, 0)),
3206 TREE_OPERAND (expr, 1));
3207
3208 /* If both types are integral, just do a normal conversion.
3209 Likewise for a conversion to an unconstrained array. */
3210 if ((((INTEGRAL_TYPE_P (type)
3211 && ! (TREE_CODE (type) == INTEGER_TYPE
3212 && TYPE_VAX_FLOATING_POINT_P (type)))
3213 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3214 || (TREE_CODE (type) == RECORD_TYPE
3215 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3216 && ((INTEGRAL_TYPE_P (etype)
3217 && ! (TREE_CODE (etype) == INTEGER_TYPE
3218 && TYPE_VAX_FLOATING_POINT_P (etype)))
3219 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3220 || (TREE_CODE (etype) == RECORD_TYPE
3221 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3222 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3223 {
3224 tree rtype = type;
3225
3226 if (TREE_CODE (etype) == INTEGER_TYPE
3227 && TYPE_BIASED_REPRESENTATION_P (etype))
3228 {
3229 tree ntype = copy_type (etype);
3230
3231 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3232 TYPE_MAIN_VARIANT (ntype) = ntype;
3233 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3234 }
3235
3236 if (TREE_CODE (type) == INTEGER_TYPE
3237 && TYPE_BIASED_REPRESENTATION_P (type))
3238 {
3239 rtype = copy_type (type);
3240 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3241 TYPE_MAIN_VARIANT (rtype) = rtype;
3242 }
3243
3244 expr = convert (rtype, expr);
3245 if (type != rtype)
3246 expr = build1 (GNAT_NOP_EXPR, type, expr);
3247 }
3248
3249 /* If we are converting TO an integral type whose precision is not the
3250 same as its size, first unchecked convert to a record that contains
3251 an object of the output type. Then extract the field. */
3252 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3253 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3254 GET_MODE_BITSIZE (TYPE_MODE (type))))
3255 {
3256 tree rec_type = make_node (RECORD_TYPE);
3257 tree field = create_field_decl (get_identifier ("OBJ"), type,
3258 rec_type, 1, 0, 0, 0);
3259
3260 TYPE_FIELDS (rec_type) = field;
3261 layout_type (rec_type);
3262
3263 expr = unchecked_convert (rec_type, expr);
3264 expr = build_component_ref (expr, NULL_TREE, field);
3265 }
3266
3267 /* Similarly for an integral input type whose precision is not equal to its
3268 size. */
3269 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3270 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3271 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3272 {
3273 tree rec_type = make_node (RECORD_TYPE);
3274 tree field
3275 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3276 1, 0, 0, 0);
3277
3278 TYPE_FIELDS (rec_type) = field;
3279 layout_type (rec_type);
3280
3281 expr = build_constructor (rec_type, build_tree_list (field, expr));
3282 expr = unchecked_convert (type, expr);
3283 }
3284
3285 /* We have a special case when we are converting between two
3286 unconstrained array types. In that case, take the address,
3287 convert the fat pointer types, and dereference. */
3288 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3289 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3290 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3291 build1 (UNCHECKED_CONVERT_EXPR, TREE_TYPE (type),
3292 build_unary_op (ADDR_EXPR, NULL_TREE,
3293 expr)));
3294
3295 /* If both types are aggregates with the same mode and alignment (except
3296 if the result is a UNION_TYPE), we can do this as a normal conversion. */
3297 else if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3298 && TREE_CODE (type) != UNION_TYPE
3299 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
3300 && TYPE_MODE (type) == TYPE_MODE (etype))
3301 expr = build1 (CONVERT_EXPR, type, expr);
3302
3303 else
3304 {
3305 expr = maybe_unconstrained_array (expr);
3306 etype = TREE_TYPE (expr);
3307 expr = build1 (UNCHECKED_CONVERT_EXPR, type, expr);
3308 }
3309
3310
3311 /* If the result is an integral type whose size is not equal to
3312 the size of the underlying machine type, sign- or zero-extend
3313 the result. We need not do this in the case where the input is
3314 an integral type of the same precision and signedness or if the output
3315 is a biased type or if both the input and output are unsigned. */
3316 if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3317 && ! (TREE_CODE (type) == INTEGER_TYPE
3318 && TYPE_BIASED_REPRESENTATION_P (type))
3319 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3320 GET_MODE_BITSIZE (TYPE_MODE (type)))
3321 && ! (INTEGRAL_TYPE_P (etype)
3322 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3323 && operand_equal_p (TYPE_RM_SIZE (type),
3324 (TYPE_RM_SIZE (etype) != 0
3325 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3326 0))
3327 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3328 {
3329 tree base_type = type_for_mode (TYPE_MODE (type), TREE_UNSIGNED (type));
3330 tree shift_expr
3331 = convert (base_type,
3332 size_binop (MINUS_EXPR,
3333 bitsize_int
3334 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3335 TYPE_RM_SIZE (type)));
3336 expr
3337 = convert (type,
3338 build_binary_op (RSHIFT_EXPR, base_type,
3339 build_binary_op (LSHIFT_EXPR, base_type,
3340 convert (base_type, expr),
3341 shift_expr),
3342 shift_expr));
3343 }
3344
3345 /* An unchecked conversion should never raise Constraint_Error. The code
3346 below assumes that GCC's conversion routines overflow the same
3347 way that the underlying hardware does. This is probably true. In
3348 the rare case when it isn't, we can rely on the fact that such
3349 conversions are erroneous anyway. */
3350 if (TREE_CODE (expr) == INTEGER_CST)
3351 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3352
3353 /* If the sizes of the types differ and this is an UNCHECKED_CONVERT_EXPR,
3354 show no longer constant. */
3355 if (TREE_CODE (expr) == UNCHECKED_CONVERT_EXPR
3356 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3357 TREE_CONSTANT (expr) = 0;
3358
3359 return expr;
3360 }
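
/* Illustrative example of the extension step above, under the assumption of
   a 5-bit signed target type represented in a 32-bit machine type: the shift
   count is 32 - 5 = 27, so an input of 16 (binary 10000) is shifted left to
   the sign bit and back right, yielding -16 with the sign bit propagated. */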