c-common.c (c_unsafe_for_reeval): Rename.
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
11 * *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
22 * *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
25 * *
26 ****************************************************************************/
27
28 #include "config.h"
29 #include "system.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "defaults.h"
33 #include "toplev.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "convert.h"
37
38 #include "ada.h"
39 #include "types.h"
40 #include "atree.h"
41 #include "elists.h"
42 #include "namet.h"
43 #include "nlists.h"
44 #include "stringt.h"
45 #include "uintp.h"
46 #include "fe.h"
47 #include "sinfo.h"
48 #include "einfo.h"
49 #include "ada-tree.h"
50 #include "gigi.h"
51
52 #ifndef MAX_FIXED_MODE_SIZE
53 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
54 #endif
55
56 #ifndef MAX_BITS_PER_WORD
57 #define MAX_BITS_PER_WORD BITS_PER_WORD
58 #endif
59
60 /* If nonzero, pretend we are allocating at global level. */
61 int force_global;
62
63 /* Tree nodes for the various types and decls we create. */
64 tree gnat_std_decls[(int) ADT_LAST];
65
66 /* Functions to call for each of the possible raise reasons. */
67 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
68
69 /* Associates a GNAT tree node to a GCC tree node. It is used in
70 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
71 of `save_gnu_tree' for more info. */
72 static tree *associate_gnat_to_gnu;
73
74 /* This listhead is used to record any global objects that need elaboration.
75 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
76 initial value to assign. */
77
78 static tree pending_elaborations;
79
80 /* This stack allows us to momentarily switch to generating elaboration
81 lists for an inner context. */
82
83 static struct e_stack {struct e_stack *next; tree elab_list; } *elist_stack;
84
85 /* This variable keeps a table for types for each precision so that we only
86 allocate each of them once. Signed and unsigned types are kept separate.
87
88 Note that these types are only used when fold-const requests something
89 special. Perhaps we should NOT share these types; we'll see how it
90 goes later. */
91 static tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
92
93 /* Likewise for float types, but record these by mode. */
94 static tree float_types[NUM_MACHINE_MODES];
95
96 /* For each binding contour we allocate a binding_level structure which records
97 the entities defined or declared in that contour. Contours include:
98
99 the global one
100 one for each subprogram definition
101 one for each compound statement (declare block)
102
103 Binding contours are used to create GCC tree BLOCK nodes. */
104
105 struct binding_level
106 {
107 /* A chain of ..._DECL nodes for all variables, constants, functions,
108 parameters and type declarations. These ..._DECL nodes are chained
109 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
110 in the reverse of the order supplied to be compatible with the
111 back-end. */
112 tree names;
113 /* For each level (except the global one), a chain of BLOCK nodes for all
114 the levels that were entered and exited one level down from this one. */
115 tree blocks;
116 /* The BLOCK node for this level, if one has been preallocated.
117 If 0, the BLOCK is allocated (if needed) when the level is popped. */
118 tree this_block;
119 /* The binding level containing this one (the enclosing binding level). */
120 struct binding_level *level_chain;
121 };
122
123 /* The binding level currently in effect. */
124 static struct binding_level *current_binding_level = NULL;
125
126 /* A chain of binding_level structures awaiting reuse. */
127 static struct binding_level *free_binding_level = NULL;
128
129 /* The outermost binding level. This binding level is created when the
130 compiler is started and it will exist through the entire compilation. */
131 static struct binding_level *global_binding_level;
132
133 /* Binding level structures are initialized by copying this one. */
134 static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
135
136 static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
137 static tree compute_related_constant PARAMS ((tree, tree));
138 static tree split_plus PARAMS ((tree, tree *));
139 static int value_zerop PARAMS ((tree));
140 static tree float_type_for_size PARAMS ((int, enum machine_mode));
141 static tree convert_to_fat_pointer PARAMS ((tree, tree));
142 static tree convert_to_thin_pointer PARAMS ((tree, tree));
143 static tree make_descriptor_field PARAMS ((const char *,tree, tree,
144 tree));
145 static void mark_binding_level PARAMS ((PTR));
146 static void mark_e_stack PARAMS ((PTR));
147 \f
148 /* Initialize the association of GNAT nodes to GCC trees. */
149
150 void
151 init_gnat_to_gnu ()
152 {
153 Node_Id gnat_node;
154
155 associate_gnat_to_gnu = (tree *) xmalloc (max_gnat_nodes * sizeof (tree));
156 ggc_add_tree_root (associate_gnat_to_gnu, max_gnat_nodes);
157
158 for (gnat_node = 0; gnat_node < max_gnat_nodes; gnat_node++)
159 associate_gnat_to_gnu[gnat_node] = NULL_TREE;
160
161 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
162 ggc_add_tree_root (&pending_elaborations, 1);
163 ggc_add_root ((PTR) &elist_stack, 1, sizeof (struct e_stack), mark_e_stack);
164 ggc_add_tree_root (&signed_and_unsigned_types[0][0],
165 (sizeof signed_and_unsigned_types
166 / sizeof signed_and_unsigned_types[0][0]));
167 ggc_add_tree_root (float_types, ARRAY_SIZE (float_types));
168
169 ggc_add_root (&current_binding_level, 1, sizeof current_binding_level,
170 mark_binding_level);
171 }
172
173 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
174 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
175 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
176
177 If GNU_DECL is zero, a previous association is to be reset. */
178
179 void
180 save_gnu_tree (gnat_entity, gnu_decl, no_check)
181 Entity_Id gnat_entity;
182 tree gnu_decl;
183 int no_check;
184 {
185 if (gnu_decl
186 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
187 || (! no_check && ! DECL_P (gnu_decl))))
188 gigi_abort (401);
189
190 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
191 }
192
193 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
194 Return the ..._DECL node that was associated with it. If there is no tree
195 node associated with GNAT_ENTITY, abort.
196
197 In some cases, such as delayed elaboration or expressions that need to
198 be elaborated only once, GNAT_ENTITY is really not an entity. */
199
200 tree
201 get_gnu_tree (gnat_entity)
202 Entity_Id gnat_entity;
203 {
204 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
205 gigi_abort (402);
206
207 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
208 }
209
210 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
211
212 int
213 present_gnu_tree (gnat_entity)
214 Entity_Id gnat_entity;
215 {
216 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
217 }
218
219 \f
220 /* Return non-zero if we are currently in the global binding level. */
221
222 int
223 global_bindings_p ()
224 {
225 return (force_global != 0 || current_binding_level == global_binding_level
226 ? -1 : 0);
227 }
228
229 /* Return the list of declarations in the current level. Note that this list
230 is in reverse order (it has to be so for back-end compatibility). */
231
232 tree
233 getdecls ()
234 {
235 return current_binding_level->names;
236 }
237
238 /* Nonzero if the current level needs to have a BLOCK made. */
239
240 int
241 kept_level_p ()
242 {
243 return (current_binding_level->names != 0);
244 }
245
246 /* Enter a new binding level. The input parameter is ignored, but has to be
247 specified for back-end compatibility. */
248
249 void
250 pushlevel (ignore)
251 int ignore ATTRIBUTE_UNUSED;
252 {
253 struct binding_level *newlevel = NULL;
254
255 /* Reuse a struct for this binding level, if there is one. */
256 if (free_binding_level)
257 {
258 newlevel = free_binding_level;
259 free_binding_level = free_binding_level->level_chain;
260 }
261 else
262 newlevel
263 = (struct binding_level *) xmalloc (sizeof (struct binding_level));
264
265 *newlevel = clear_binding_level;
266
267 /* Add this level to the front of the chain (stack) of levels that are
268 active. */
269 newlevel->level_chain = current_binding_level;
270 current_binding_level = newlevel;
271 }
272
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so
   create a "block" (a BLOCK node) for the level
   to record its declarations and subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.

   Returns the BLOCK node made for this level, or zero if none was.  */

tree
poplevel (keep, reverse, functionbody)
     int keep;
     int reverse;
     int functionbody;
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed
     for the binding level that we are about to exit and which is returned
     by this routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL node, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
	&& ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
	&& DECL_INITIAL (decl_node) != 0)
      {
	push_function_context ();
	output_inline_function (decl_node);
	pop_function_context ();
      }

  /* Reuse a BLOCK preallocated by set_block if there is one; otherwise
     make one only when the caller asked for it.  */
  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock of its enclosing
     scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
	 don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
	{
	  if (TREE_USED (subblock_node))
	    TREE_USED (DECL_NAME (subblock_node)) = 1;
	  if (TREE_ADDRESSABLE (subblock_node))
	    TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
	}

  {
    /* Pop the current level, and free the structure for reuse.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
	 in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
	 leave them in the BLOCK because they are found in the FUNCTION_DECL
	 instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      /* A preallocated BLOCK is already chained in; only chain ones we
	 created here.  */
      if (!block_previously_created)
	current_binding_level->blocks
	  = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
392 \f
393 /* Insert BLOCK at the end of the list of subblocks of the
394 current binding level. This is used when a BIND_EXPR is expanded,
395 to handle the BLOCK node inside the BIND_EXPR. */
396
397 void
398 insert_block (block)
399 tree block;
400 {
401 TREE_USED (block) = 1;
402 current_binding_level->blocks
403 = chainon (current_binding_level->blocks, block);
404 }
405
406 /* Set the BLOCK node for the innermost scope
407 (the one we are currently in). */
408
409 void
410 set_block (block)
411 tree block;
412 {
413 current_binding_level->this_block = block;
414 current_binding_level->names = chainon (current_binding_level->names,
415 BLOCK_VARS (block));
416 current_binding_level->blocks = chainon (current_binding_level->blocks,
417 BLOCK_SUBBLOCKS (block));
418 }
419
/* Records a ..._DECL node DECL as belonging to the current lexical scope:
   set its DECL_CONTEXT, chain it onto the scope's declaration list, and
   possibly make it the name of its type.  Returns the ..._DECL node.  */

tree
pushdecl (decl)
     tree decl;
{
  struct binding_level *b;

  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of its function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    {
      b = global_binding_level;
      DECL_CONTEXT (decl) = 0;
    }
  else
    {
      b = current_binding_level;
      DECL_CONTEXT (decl) = current_function_decl;
    }

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later if necessary.  This needs to be
     this way for compatibility with the back-end.

     Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list.  They
     will cause trouble with the debugger and aren't needed anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TREE_CHAIN (decl) = b->names;
      b->names = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name
     (i.e. was artificial) while DECL itself is from source.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
	  || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
	  || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
	      && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
	      && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  return decl;
}
475 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  Creates the global binding level, the common
   tree nodes, and a few type declarations Gigi needs early.  */

void
gnat_init_decl_processing ()
{
  lineno = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of ptr_mode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (ptr_mode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  /* Pointer-to-void, used pervasively for runtime interfaces.  */
  ptr_void_type_node = build_pointer_type (void_type_node);

}
511
512 /* Create the predefined scalar types such as `integer_type_node' needed
513 in the gcc back-end and initialize the global binding level. */
514
515 void
516 init_gigi_decls (long_long_float_type, exception_type)
517 tree long_long_float_type, exception_type;
518 {
519 tree endlink, decl;
520 unsigned int i;
521
522 /* Set the types that GCC and Gigi use from the front end. We would like
523 to do this for char_type_node, but it needs to correspond to the C
524 char type. */
525 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
526 {
527 /* In this case, the builtin floating point types are VAX float,
528 so make up a type for use. */
529 longest_float_type_node = make_node (REAL_TYPE);
530 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
531 layout_type (longest_float_type_node);
532 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
533 longest_float_type_node));
534 }
535 else
536 longest_float_type_node = TREE_TYPE (long_long_float_type);
537
538 except_type_node = TREE_TYPE (exception_type);
539
540 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
541 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
542 unsigned_type_node));
543
544 void_type_decl_node
545 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
546 void_type_node));
547
548 void_ftype = build_function_type (void_type_node, NULL_TREE);
549 ptr_void_ftype = build_pointer_type (void_ftype);
550
551 /* Now declare runtime functions. */
552 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
553
554 /* malloc is a function declaration tree for a function to allocate
555 memory. */
556 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
557 NULL_TREE,
558 build_function_type (ptr_void_type_node,
559 tree_cons (NULL_TREE,
560 sizetype,
561 endlink)),
562 NULL_TREE, 0, 1, 1, 0);
563
564 /* free is a function declaration tree for a function to free memory. */
565
566 free_decl
567 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
568 build_function_type (void_type_node,
569 tree_cons (NULL_TREE,
570 ptr_void_type_node,
571 endlink)),
572 NULL_TREE, 0, 1, 1, 0);
573
574 /* Make the types and functions used for exception processing. */
575 jmpbuf_type
576 = build_array_type (gnat_type_for_mode (Pmode, 0),
577 build_index_type (build_int_2 (5, 0)));
578 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
579 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
580
581 /* Functions to get and set the jumpbuf pointer for the current thread. */
582 get_jmpbuf_decl
583 = create_subprog_decl
584 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
585 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
586 NULL_TREE, 0, 1, 1, 0);
587
588 set_jmpbuf_decl
589 = create_subprog_decl
590 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
591 NULL_TREE,
592 build_function_type (void_type_node,
593 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
594 NULL_TREE, 0, 1, 1, 0);
595
596 /* Function to get the current exception. */
597 get_excptr_decl
598 = create_subprog_decl
599 (get_identifier ("system__soft_links__get_gnat_exception"),
600 NULL_TREE,
601 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
602 NULL_TREE, 0, 1, 1, 0);
603
604 /* Functions that raise exceptions. */
605 raise_nodefer_decl
606 = create_subprog_decl
607 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
608 build_function_type (void_type_node,
609 tree_cons (NULL_TREE,
610 build_pointer_type (except_type_node),
611 endlink)),
612 NULL_TREE, 0, 1, 1, 0);
613
614 /* If in no exception handlers mode, all raise statements are redirected to
615 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
616 this procedure will never be called in this mode. */
617 if (No_Exception_Handlers_Set ())
618 {
619 decl
620 = create_subprog_decl
621 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
622 build_function_type (void_type_node,
623 tree_cons (NULL_TREE,
624 build_pointer_type (char_type_node),
625 tree_cons (NULL_TREE,
626 integer_type_node,
627 endlink))),
628 NULL_TREE, 0, 1, 1, 0);
629
630 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0];
631 i++)
632 gnat_raise_decls[i] = decl;
633 }
634 else
635 /* Otherwise, make one decl for each exception reason. */
636 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
637 {
638 char name[17];
639
640 sprintf (name, "__gnat_rcheck_%.2d", i);
641 gnat_raise_decls[i]
642 = create_subprog_decl
643 (get_identifier (name), NULL_TREE,
644 build_function_type (void_type_node,
645 tree_cons (NULL_TREE,
646 build_pointer_type
647 (char_type_node),
648 tree_cons (NULL_TREE,
649 integer_type_node,
650 endlink))),
651 NULL_TREE, 0, 1, 1, 0);
652 }
653
654 /* Indicate that these never return. */
655 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
656 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
657 TREE_TYPE (raise_nodefer_decl)
658 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
659 TYPE_QUAL_VOLATILE);
660
661 for (i = 0; i < sizeof gnat_raise_decls / sizeof gnat_raise_decls[0]; i++)
662 {
663 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
664 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
665 TREE_TYPE (gnat_raise_decls[i])
666 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
667 TYPE_QUAL_VOLATILE);
668 }
669
670 /* setjmp returns an integer and has one operand, which is a pointer to
671 a jmpbuf. */
672 setjmp_decl
673 = create_subprog_decl
674 (get_identifier ("setjmp"), NULL_TREE,
675 build_function_type (integer_type_node,
676 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
677 NULL_TREE, 0, 1, 1, 0);
678
679 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
680 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
681
682 main_identifier_node = get_identifier ("main");
683
684 ggc_add_tree_root (gnat_std_decls, ARRAY_SIZE (gnat_std_decls));
685 ggc_add_tree_root (gnat_raise_decls, ARRAY_SIZE (gnat_raise_decls));
686 }
687 \f
688 /* This routine is called in tree.c to print an error message for invalid use
689 of an incomplete type. */
690
691 void
692 incomplete_type_error (dont_care_1, dont_care_2)
693 tree dont_care_1 ATTRIBUTE_UNUSED;
694 tree dont_care_2 ATTRIBUTE_UNUSED;
695 {
696 gigi_abort (404);
697 }
698
699 /* This function is called indirectly from toplev.c to handle incomplete
700 declarations, i.e. VAR_DECL nodes whose DECL_SIZE is zero. To be precise,
701 compile_file in toplev.c makes an indirect call through the function pointer
702 incomplete_decl_finalize_hook which is initialized to this routine in
703 init_decl_processing. */
704
705 void
706 gnat_finish_incomplete_decl (dont_care)
707 tree dont_care ATTRIBUTE_UNUSED;
708 {
709 gigi_abort (405);
710 }
711 \f
712 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
713 nodes (FIELDLIST), finish constructing the record or union type.
714 If HAS_REP is nonzero, this record has a rep clause; don't call
715 layout_type but merely set the size and alignment ourselves.
716 If DEFER_DEBUG is nonzero, do not call the debugging routines
717 on this type; it will be done later. */
718
719 void
720 finish_record_type (record_type, fieldlist, has_rep, defer_debug)
721 tree record_type;
722 tree fieldlist;
723 int has_rep;
724 int defer_debug;
725 {
726 enum tree_code code = TREE_CODE (record_type);
727 tree ada_size = bitsize_zero_node;
728 tree size = bitsize_zero_node;
729 tree size_unit = size_zero_node;
730 int var_size = 0;
731 tree field;
732
733 TYPE_FIELDS (record_type) = fieldlist;
734
735 if (TYPE_NAME (record_type) != 0
736 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
737 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
738 else
739 TYPE_STUB_DECL (record_type)
740 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
741 record_type));
742
743 /* We don't need both the typedef name and the record name output in
744 the debugging information, since they are the same. */
745 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
746
747 /* Globally initialize the record first. If this is a rep'ed record,
748 that just means some initializations; otherwise, layout the record. */
749
750 if (has_rep)
751 {
752 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
753 TYPE_MODE (record_type) = BLKmode;
754 if (TYPE_SIZE (record_type) == 0)
755 {
756 TYPE_SIZE (record_type) = bitsize_zero_node;
757 TYPE_SIZE_UNIT (record_type) = size_zero_node;
758 }
759 }
760 else
761 {
762 /* Ensure there isn't a size already set. There can be in an error
763 case where there is a rep clause but all fields have errors and
764 no longer have a position. */
765 TYPE_SIZE (record_type) = 0;
766 layout_type (record_type);
767 }
768
769 /* At this point, the position and size of each field is known. It was
770 either set before entry by a rep clause, or by laying out the type
771 above. We now make a pass through the fields (in reverse order for
772 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
773 (for rep'ed records that are not padding types); and the mode (for
774 rep'ed records). */
775
776 if (code == QUAL_UNION_TYPE)
777 fieldlist = nreverse (fieldlist);
778
779 for (field = fieldlist; field; field = TREE_CHAIN (field))
780 {
781 tree type = TREE_TYPE (field);
782 tree this_size = DECL_SIZE (field);
783 tree this_size_unit = DECL_SIZE_UNIT (field);
784 tree this_ada_size = DECL_SIZE (field);
785
786 /* We need to make an XVE/XVU record if any field has variable size,
787 whether or not the record does. For example, if we have an union,
788 it may be that all fields, rounded up to the alignment, have the
789 same size, in which case we'll use that size. But the debug
790 output routines (except Dwarf2) won't be able to output the fields,
791 so we need to make the special record. */
792 if (TREE_CODE (this_size) != INTEGER_CST)
793 var_size = 1;
794
795 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
796 || TREE_CODE (type) == QUAL_UNION_TYPE)
797 && ! TYPE_IS_FAT_POINTER_P (type)
798 && ! TYPE_CONTAINS_TEMPLATE_P (type)
799 && TYPE_ADA_SIZE (type) != 0)
800 this_ada_size = TYPE_ADA_SIZE (type);
801
802 if (has_rep && ! DECL_BIT_FIELD (field))
803 TYPE_ALIGN (record_type)
804 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
805
806 switch (code)
807 {
808 case UNION_TYPE:
809 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
810 size = size_binop (MAX_EXPR, size, this_size);
811 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
812 break;
813
814 case QUAL_UNION_TYPE:
815 ada_size
816 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
817 this_ada_size, ada_size));
818 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
819 this_size, size));
820 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
821 this_size_unit, size_unit));
822 break;
823
824 case RECORD_TYPE:
825 /* Since we know here that all fields are sorted in order of
826 increasing bit position, the size of the record is one
827 higher than the ending bit of the last field processed
828 unless we have a rep clause, since in that case we might
829 have a field outside a QUAL_UNION_TYPE that has a higher ending
830 position. So use a MAX in that case. Also, if this field is a
831 QUAL_UNION_TYPE, we need to take into account the previous size in
832 the case of empty variants. */
833 ada_size
834 = merge_sizes (ada_size, bit_position (field), this_ada_size,
835 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
836 size = merge_sizes (size, bit_position (field), this_size,
837 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
838 size_unit
839 = merge_sizes (size_unit, byte_position (field), this_size_unit,
840 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
841 break;
842
843 default:
844 abort ();
845 }
846 }
847
848 if (code == QUAL_UNION_TYPE)
849 nreverse (fieldlist);
850
851 /* If this is a padding record, we never want to make the size smaller than
852 what was specified in it, if any. */
853 if (TREE_CODE (record_type) == RECORD_TYPE
854 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
855 {
856 size = TYPE_SIZE (record_type);
857 size_unit = TYPE_SIZE_UNIT (record_type);
858 }
859
860 /* Now set any of the values we've just computed that apply. */
861 if (! TYPE_IS_FAT_POINTER_P (record_type)
862 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
863 TYPE_ADA_SIZE (record_type) = ada_size;
864
865 #ifdef ROUND_TYPE_SIZE
866 size = ROUND_TYPE_SIZE (record_type, size, TYPE_ALIGN (record_type));
867 size_unit = ROUND_TYPE_SIZE_UNIT (record_size, size_unit,
868 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
869 #else
870 size = round_up (size, TYPE_ALIGN (record_type));
871 size_unit = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
872 #endif
873
874 if (has_rep
875 && ! (TREE_CODE (record_type) == RECORD_TYPE
876 && TYPE_IS_PADDING_P (record_type)
877 && TREE_CODE (size) != INTEGER_CST
878 && contains_placeholder_p (size)))
879 {
880 TYPE_SIZE (record_type) = size;
881 TYPE_SIZE_UNIT (record_type) = size_unit;
882 }
883
884 if (has_rep)
885 compute_record_mode (record_type);
886
887 if (! defer_debug)
888 {
889 /* If this record is of variable size, rename it so that the
890 debugger knows it is and make a new, parallel, record
891 that tells the debugger how the record is laid out. See
892 exp_dbug.ads. */
893 if (var_size)
894 {
895 tree new_record_type
896 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
897 ? UNION_TYPE : TREE_CODE (record_type));
898 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
899 tree new_id
900 = concat_id_with_name (orig_id,
901 TREE_CODE (record_type) == QUAL_UNION_TYPE
902 ? "XVU" : "XVE");
903 tree last_pos = bitsize_zero_node;
904 tree old_field;
905
906 TYPE_NAME (new_record_type) = new_id;
907 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
908 TYPE_STUB_DECL (new_record_type)
909 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
910 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
911 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
912 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
913 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
914
915 /* Now scan all the fields, replacing each field with a new
916 field corresponding to the new encoding. */
917 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
918 old_field = TREE_CHAIN (old_field))
919 {
920 tree field_type = TREE_TYPE (old_field);
921 tree field_name = DECL_NAME (old_field);
922 tree new_field;
923 tree curpos = bit_position (old_field);
924 int var = 0;
925 unsigned int align = 0;
926 tree pos;
927
928 /* See how the position was modified from the last position.
929
930 There are two basic cases we support: a value was added
931 to the last position or the last position was rounded to
932 a boundary and they something was added. Check for the
933 first case first. If not, see if there is any evidence
934 of rounding. If so, round the last position and try
935 again.
936
937 If this is a union, the position can be taken as zero. */
938
939 if (TREE_CODE (new_record_type) == UNION_TYPE)
940 pos = bitsize_zero_node, align = 0;
941 else
942 pos = compute_related_constant (curpos, last_pos);
943
944 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
945 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
946 {
947 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
948 pos = compute_related_constant (curpos,
949 round_up (last_pos, align));
950 }
951 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
952 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
953 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
954 && host_integerp (TREE_OPERAND
955 (TREE_OPERAND (curpos, 0), 1),
956 1))
957 {
958 align
959 = tree_low_cst
960 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
961 pos = compute_related_constant (curpos,
962 round_up (last_pos, align));
963 }
964
965 /* If we can't compute a position, set it to zero.
966
967 ??? We really should abort here, but it's too much work
968 to get this correct for all cases. */
969
970 if (pos == 0)
971 pos = bitsize_zero_node;
972
973 /* See if this type is variable-size and make a new type
974 and indicate the indirection if so. */
975 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
976 {
977 field_type = build_pointer_type (field_type);
978 var = 1;
979 }
980
981 /* Make a new field name, if necessary. */
982 if (var || align != 0)
983 {
984 char suffix[6];
985
986 if (align != 0)
987 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
988 align / BITS_PER_UNIT);
989 else
990 strcpy (suffix, "XVL");
991
992 field_name = concat_id_with_name (field_name, suffix);
993 }
994
995 new_field = create_field_decl (field_name, field_type,
996 new_record_type, 0,
997 DECL_SIZE (old_field), pos, 0);
998 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
999 TYPE_FIELDS (new_record_type) = new_field;
1000
1001 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1002 zero. The only time it's not the last field of the record
1003 is when there are other components at fixed positions after
1004 it (meaning there was a rep clause for every field) and we
1005 want to be able to encode them. */
1006 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1007 (TREE_CODE (TREE_TYPE (old_field))
1008 == QUAL_UNION_TYPE)
1009 ? bitsize_zero_node
1010 : DECL_SIZE (old_field));
1011 }
1012
1013 TYPE_FIELDS (new_record_type)
1014 = nreverse (TYPE_FIELDS (new_record_type));
1015
1016 rest_of_type_compilation (new_record_type, global_bindings_p ());
1017 }
1018
1019 rest_of_type_compilation (record_type, global_bindings_p ());
1020 }
1021 }
1022
1023 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1024 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1025 if this represents a QUAL_UNION_TYPE in which case we must look for
1026 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1027 is nonzero, we must take the MAX of the end position of this field
1028 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1029
1030 We return an expression for the size. */
1031
1032 static tree
1033 merge_sizes (last_size, first_bit, size, special, has_rep)
1034 tree last_size;
1035 tree first_bit, size;
1036 int special;
1037 int has_rep;
1038 {
1039 tree type = TREE_TYPE (last_size);
1040
1041 if (! special || TREE_CODE (size) != COND_EXPR)
1042 {
1043 tree new = size_binop (PLUS_EXPR, first_bit, size);
1044
1045 if (has_rep)
1046 new = size_binop (MAX_EXPR, last_size, new);
1047
1048 return new;
1049 }
1050
1051 return fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1052 integer_zerop (TREE_OPERAND (size, 1))
1053 ? last_size : merge_sizes (last_size, first_bit,
1054 TREE_OPERAND (size, 1),
1055 1, has_rep),
1056 integer_zerop (TREE_OPERAND (size, 2))
1057 ? last_size : merge_sizes (last_size, first_bit,
1058 TREE_OPERAND (size, 2),
1059 1, has_rep)));
1060 }
1061
1062 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1063 related by the addition of a constant. Return that constant if so. */
1064
1065 static tree
1066 compute_related_constant (op0, op1)
1067 tree op0, op1;
1068 {
1069 tree op0_var, op1_var;
1070 tree op0_con = split_plus (op0, &op0_var);
1071 tree op1_con = split_plus (op1, &op1_var);
1072 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1073
1074 if (operand_equal_p (op0_var, op1_var, 0))
1075 return result;
1076 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1077 return result;
1078 else
1079 return 0;
1080 }
1081
1082 /* Utility function of above to split a tree OP which may be a sum, into a
1083 constant part, which is returned, and a variable part, which is stored
1084 in *PVAR. *PVAR may be size_zero_node. All operations must be of
1085 sizetype. */
1086
1087 static tree
1088 split_plus (in, pvar)
1089 tree in;
1090 tree *pvar;
1091 {
1092 tree result = bitsize_zero_node;
1093
1094 while (TREE_CODE (in) == NON_LVALUE_EXPR)
1095 in = TREE_OPERAND (in, 0);
1096
1097 *pvar = in;
1098 if (TREE_CODE (in) == INTEGER_CST)
1099 {
1100 *pvar = bitsize_zero_node;
1101 return in;
1102 }
1103 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1104 {
1105 tree lhs_var, rhs_var;
1106 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1107 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1108
1109 result = size_binop (PLUS_EXPR, result, lhs_con);
1110 result = size_binop (TREE_CODE (in), result, rhs_con);
1111
1112 if (lhs_var == TREE_OPERAND (in, 0)
1113 && rhs_var == TREE_OPERAND (in, 1))
1114 return bitsize_zero_node;
1115
1116 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1117 return result;
1118 }
1119 else
1120 return bitsize_zero_node;
1121 }
1122 \f
1123 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1124 subprogram. If it is void_type_node, then we are dealing with a procedure,
1125 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1126 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1127 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1128 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1129 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1130 RETURNS_WITH_DSP is nonzero if the function is to return with a
1131 depressed stack pointer. */
1132
1133 tree
1134 create_subprog_type (return_type, param_decl_list, cico_list,
1135 returns_unconstrained, returns_by_ref, returns_with_dsp)
1136 tree return_type;
1137 tree param_decl_list;
1138 tree cico_list;
1139 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1140 {
1141 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1142 the subprogram formal parameters. This list is generated by traversing the
1143 input list of PARM_DECL nodes. */
1144 tree param_type_list = NULL;
1145 tree param_decl;
1146 tree type;
1147
1148 for (param_decl = param_decl_list; param_decl;
1149 param_decl = TREE_CHAIN (param_decl))
1150 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1151 param_type_list);
1152
1153 /* The list of the function parameter types has to be terminated by the void
1154 type to signal to the back-end that we are not dealing with a variable
1155 parameter subprogram, but that the subprogram has a fixed number of
1156 parameters. */
1157 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1158
1159 /* The list of argument types has been created in reverse
1160 so nreverse it. */
1161 param_type_list = nreverse (param_type_list);
1162
1163 type = build_function_type (return_type, param_type_list);
1164
1165 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1166 or the new type should, make a copy of TYPE. Likewise for
1167 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1168 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1169 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1170 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1171 type = copy_type (type);
1172
1173 TYPE_CI_CO_LIST (type) = cico_list;
1174 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1175 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1176 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1177 return type;
1178 }
1179 \f
1180 /* Return a copy of TYPE but safe to modify in any way. */
1181
1182 tree
1183 copy_type (type)
1184 tree type;
1185 {
1186 tree new = copy_node (type);
1187
1188 /* copy_node clears this field instead of copying it, because it is
1189 aliased with TREE_CHAIN. */
1190 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1191
1192 TYPE_POINTER_TO (new) = 0;
1193 TYPE_REFERENCE_TO (new) = 0;
1194 TYPE_MAIN_VARIANT (new) = new;
1195 TYPE_NEXT_VARIANT (new) = 0;
1196
1197 return new;
1198 }
1199 \f
1200 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1201 TYPE_INDEX_TYPE is INDEX. */
1202
1203 tree
1204 create_index_type (min, max, index)
1205 tree min, max;
1206 tree index;
1207 {
1208 /* First build a type for the desired range. */
1209 tree type = build_index_2_type (min, max);
1210
1211 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1212 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1213 is set, but not to INDEX, make a copy of this type with the requested
1214 index type. Note that we have no way of sharing these types, but that's
1215 only a small hole. */
1216 if (TYPE_INDEX_TYPE (type) == index)
1217 return type;
1218 else if (TYPE_INDEX_TYPE (type) != 0)
1219 type = copy_type (type);
1220
1221 TYPE_INDEX_TYPE (type) = index;
1222 return type;
1223 }
1224 \f
1225 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1226 string) and TYPE is a ..._TYPE node giving its data type.
1227 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1228 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1229 information about this type. */
1230
1231 tree
1232 create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1233 tree type_name;
1234 tree type;
1235 struct attrib *attr_list;
1236 int artificial_p;
1237 int debug_info_p;
1238 {
1239 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1240 enum tree_code code = TREE_CODE (type);
1241
1242 DECL_ARTIFICIAL (type_decl) = artificial_p;
1243 pushdecl (type_decl);
1244 process_attributes (type_decl, attr_list);
1245
1246 /* Pass type declaration information to the debugger unless this is an
1247 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1248 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1249 a dummy type, which will be completed later, or a type for which
1250 debugging information was not requested. */
1251 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1252 || ! debug_info_p)
1253 DECL_IGNORED_P (type_decl) = 1;
1254 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1255 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1256 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1257 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1258
1259 return type_decl;
1260 }
1261
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.  */

tree
create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
		 extern_flag, static_flag, attr_list)
     tree var_name;
     tree asm_name;
     tree type;
     tree var_init;
     int const_flag;
     int public_flag;
     int extern_flag;
     int static_flag;
     struct attrib *attr_list;
{
  /* Nonzero if VAR_INIT is usable as a static initializer: it must have
     the variable's own type (no conversion needed) and, for variables in
     static storage, be acceptable to initializer_constant_valid_p; for
     automatic variables, TREE_CONSTANT suffices.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small"  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* Set below when the initialization must be performed by an explicit
     assignment statement rather than via DECL_INITIAL.  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustents.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* A global with a non-constant initializer is elaborated at run time;
     queue the assignment for the unit's elaboration procedure.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  /* Use an explicit assignment when the initializer's type differs from
     TYPE, or when a static local has a non-constant initializer.  */
  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1385 \f
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (field_name, field_type, record_type, packed, size, pos,
		   addressable)
     tree field_name;
     tree field_type;
     tree record_type;
     int packed;
     tree size, pos;
     int addressable;
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     If it is a padding type where the inner field is of variable size, it
     must be at its natural alignment.  Just handle the packed case here; we
     will disallow non-aligned rep clauses elsewhere.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl)
      = ((TREE_CODE (field_type) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (field_type)
	  && ! TREE_CONSTANT (DECL_SIZE (TYPE_FIELDS (field_type))))
	 ? TYPE_ALIGN (field_type) : BITS_PER_UNIT);

  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed)
    {
      /* PACKED == 1 appears to mean an ordinary pragma Pack, in which case
	 the RM size is used when it differs from the type size; other
	 nonzero values skip this — NOTE(review): confirm the encoding of
	 PACKED values against the callers.  */
      if (packed == 1 && ! operand_equal_p (rm_size (field_type),
					    TYPE_SIZE (field_type), 0))
	size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (size != 0 && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* Without packing or an explicit position, the bitfield may still
	 be placed at its type's alignment, capped by the record's.  */
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  /* Bump the alignment to the minimum implied by the field's form:
     1 bit for a bitfield, a byte for a packed non-BLKmode field, the
     type's own alignment otherwise.  */
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);

  return field_decl;
}
1509
1510 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1511 effects, has the value of zero. */
1512
1513 static int
1514 value_zerop (exp)
1515 tree exp;
1516 {
1517 if (TREE_CODE (exp) == COMPOUND_EXPR)
1518 return value_zerop (TREE_OPERAND (exp, 1));
1519
1520 return integer_zerop (exp);
1521 }
1522 \f
1523 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1524 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1525 readonly (either an IN parameter or an address of a pass-by-ref
1526 parameter). */
1527
1528 tree
1529 create_param_decl (param_name, param_type, readonly)
1530 tree param_name;
1531 tree param_type;
1532 int readonly;
1533 {
1534 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1535
1536 DECL_ARG_TYPE (param_decl) = param_type;
1537 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1538 TREE_READONLY (param_decl) = readonly;
1539 return param_decl;
1540 }
1541 \f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (decl, attr_list)
     tree decl;
     struct attrib *attr_list;
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand a machine attribute straight to the generic attribute
	   machinery, applying any type attribute in place.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* assemble_alias requires the decl to have storage, so force
	   TREE_STATIC first.  */
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	/* Weak symbols are only emitted when the target supports them;
	   otherwise warn at the attribute's source location.  */
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
#ifdef ASM_OUTPUT_SECTION_NAME
	/* Place the decl into the named section; a decl with an explicit
	   section cannot live in the common area.  */
	DECL_SECTION_NAME (decl)
	  = build_string (IDENTIFIER_LENGTH (attr_list->name),
			  IDENTIFIER_POINTER (attr_list->name));
	DECL_COMMON (decl) = 0;
#else
	post_error ("?section attributes are not supported for this target",
		    attr_list->error_point);
#endif
	break;
      }
}
1584 \f
1585 /* Add some pending elaborations on the list. */
1586
1587 void
1588 add_pending_elaborations (var_decl, var_init)
1589 tree var_decl;
1590 tree var_init;
1591 {
1592 if (var_init != 0)
1593 Check_Elaboration_Code_Allowed (error_gnat_node);
1594
1595 pending_elaborations
1596 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1597 }
1598
1599 /* Obtain any pending elaborations and clear the old list. */
1600
1601 tree
1602 get_pending_elaborations ()
1603 {
1604 /* Each thing added to the list went on the end; we want it on the
1605 beginning. */
1606 tree result = TREE_CHAIN (pending_elaborations);
1607
1608 TREE_CHAIN (pending_elaborations) = 0;
1609 return result;
1610 }
1611
1612 /* Mark the binding level stack. */
1613
1614 static void
1615 mark_binding_level (arg)
1616 PTR arg;
1617 {
1618 struct binding_level *level = *(struct binding_level **) arg;
1619
1620 for (; level != 0; level = level->level_chain)
1621 {
1622 ggc_mark_tree (level->names);
1623 ggc_mark_tree (level->blocks);
1624 ggc_mark_tree (level->this_block);
1625 }
1626 }
1627
1628 /* Mark the pending elaboration list. */
1629
1630 static void
1631 mark_e_stack (data)
1632 PTR data;
1633 {
1634 struct e_stack *p = *((struct e_stack **) data);
1635
1636 if (p != 0)
1637 {
1638 ggc_mark_tree (p->elab_list);
1639 mark_e_stack (&p->next);
1640 }
1641 }
1642
1643 /* Return nonzero if there are pending elaborations. */
1644
1645 int
1646 pending_elaborations_p ()
1647 {
1648 return TREE_CHAIN (pending_elaborations) != 0;
1649 }
1650
1651 /* Save a copy of the current pending elaboration list and make a new
1652 one. */
1653
1654 void
1655 push_pending_elaborations ()
1656 {
1657 struct e_stack *p = (struct e_stack *) xmalloc (sizeof (struct e_stack));
1658
1659 p->next = elist_stack;
1660 p->elab_list = pending_elaborations;
1661 elist_stack = p;
1662 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1663 }
1664
1665 /* Pop the stack of pending elaborations. */
1666
1667 void
1668 pop_pending_elaborations ()
1669 {
1670 struct e_stack *p = elist_stack;
1671
1672 pending_elaborations = p->elab_list;
1673 elist_stack = p->next;
1674 free (p);
1675 }
1676
/* Return the current position in pending_elaborations so we can insert
   elaborations after that point.  */

tree
get_elaboration_location ()
{
  /* The last TREE_LIST node currently on the list is the insertion
     point; anything chained after it later was added later.  */
  return tree_last (pending_elaborations);
}
1685
/* Insert the current elaborations after ELAB, which is in some elaboration
   list.  */

void
insert_elaboration_list (elab)
     tree elab;
{
  /* Remember what currently follows ELAB so it can be reattached after
     the spliced-in entries.  */
  tree next = TREE_CHAIN (elab);

  if (TREE_CHAIN (pending_elaborations))
    {
      /* Splice our entries (everything after the placeholder head) in
	 right after ELAB; note tree_last must run before its result is
	 overwritten, while the last entry still ends the chain.  */
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      /* Leave our own list empty again.  */
      TREE_CHAIN (pending_elaborations) = 0;
    }
}
1702
1703 /* Returns a LABEL_DECL node for LABEL_NAME. */
1704
1705 tree
1706 create_label_decl (label_name)
1707 tree label_name;
1708 {
1709 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1710
1711 DECL_CONTEXT (label_decl) = current_function_decl;
1712 DECL_MODE (label_decl) = VOIDmode;
1713 DECL_SOURCE_LINE (label_decl) = lineno;
1714 DECL_SOURCE_FILE (label_decl) = input_filename;
1715
1716 return label_decl;
1717 }
1718 \f
1719 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1720 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1721 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1722 PARM_DECL nodes chained through the TREE_CHAIN field).
1723
1724 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1725 appropriate fields in the FUNCTION_DECL. */
1726
1727 tree
1728 create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
1729 inline_flag, public_flag, extern_flag, attr_list)
1730 tree subprog_name;
1731 tree asm_name;
1732 tree subprog_type;
1733 tree param_decl_list;
1734 int inline_flag;
1735 int public_flag;
1736 int extern_flag;
1737 struct attrib *attr_list;
1738 {
1739 tree return_type = TREE_TYPE (subprog_type);
1740 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1741
1742 /* If this is a function nested inside an inlined external function, it
1743 means we aren't going to compile the outer function unless it is
1744 actually inlined, so do the same for us. */
1745 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1746 && DECL_EXTERNAL (current_function_decl))
1747 extern_flag = 1;
1748
1749 DECL_EXTERNAL (subprog_decl) = extern_flag;
1750 TREE_PUBLIC (subprog_decl) = public_flag;
1751 DECL_INLINE (subprog_decl) = inline_flag;
1752 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1753 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1754 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1755 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1756 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1757
1758 if (asm_name != 0)
1759 DECL_ASSEMBLER_NAME (subprog_decl) = asm_name;
1760
1761 process_attributes (subprog_decl, attr_list);
1762
1763 /* Add this decl to the current binding level. */
1764 subprog_decl = pushdecl (subprog_decl);
1765
1766 /* Output the assembler code and/or RTL for the declaration. */
1767 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1768
1769 return subprog_decl;
1770 }
1771 \f
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  Incremented by begin_subprog_body and
   decremented when the corresponding body is finished.  */

static int function_nesting_depth;
1777
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (subprog_decl)
     tree subprog_decl;
{
  tree param_decl_list;
  tree param_decl;
  tree next_param;

  /* Only save/restore backend function context when we are actually
     nested inside another function being compiled.  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
     subprogram body) so that they can be recognized as local variables in the
     subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecl'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  /* Unchain each PARM_DECL before pushing it, since pushdecl chains the
     decl into the binding level itself.  */
  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes.  They appear in the right order.  */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  init_function_start (subprog_decl, input_filename, lineno);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
1841
1842 /* Finish the definition of the current subprogram and compile it all the way
1843 to assembler language output. */
1844
void
end_subprog_body ()
{
  tree decl;
  tree cico_list;

  /* Close the binding level opened in begin_subprog_body and hook the
     resulting BLOCK up to the function.  */
  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end (input_filename, lineno, 0);

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  /* Compile the function all the way to assembler output.  */
  rest_of_compilation (current_function_decl);

  if (function_nesting_depth > 1)
    ggc_pop_context ();

#if 0
  /* If we're sure this function is defined in this file then mark it
     as such */
  if (TREE_ASM_WRITTEN (current_function_decl))
    mark_fn_defined_in_this_file (current_function_decl);
#endif

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway. */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  /* DECL_SAVED_INSNS is only nonzero when the function was saved for
     inlining; in that case its RTL must be preserved.  */
  if (DECL_SAVED_INSNS (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory. */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value. */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it. */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function. */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1916 \f
1917 /* Return a definition for a builtin function named NAME and whose data type
1918 is TYPE. TYPE should be a function type with argument types.
1919 FUNCTION_CODE tells later passes how to compile calls to this function.
1920 See tree.h for its possible values.
1921
1922 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1923 the name to be called if we can't opencode the function. */
1924
1925 tree
1926 builtin_function (name, type, function_code, class, library_name)
1927 const char *name;
1928 tree type;
1929 int function_code;
1930 enum built_in_class class;
1931 const char *library_name;
1932 {
1933 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1934
1935 DECL_EXTERNAL (decl) = 1;
1936 TREE_PUBLIC (decl) = 1;
1937 if (library_name)
1938 DECL_ASSEMBLER_NAME (decl) = get_identifier (library_name);
1939
1940 pushdecl (decl);
1941 DECL_BUILT_IN_CLASS (decl) = class;
1942 DECL_FUNCTION_CODE (decl) = function_code;
1943 return decl;
1944 }
1945
1946 /* Return an integer type with the number of bits of precision given by
1947 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1948 it is a signed type. */
1949
1950 tree
1951 gnat_type_for_size (precision, unsignedp)
1952 unsigned precision;
1953 int unsignedp;
1954 {
1955 tree t;
1956 char type_name[20];
1957
1958 if (precision <= 2 * MAX_BITS_PER_WORD
1959 && signed_and_unsigned_types[precision][unsignedp] != 0)
1960 return signed_and_unsigned_types[precision][unsignedp];
1961
1962 if (unsignedp)
1963 t = make_unsigned_type (precision);
1964 else
1965 t = make_signed_type (precision);
1966
1967 if (precision <= 2 * MAX_BITS_PER_WORD)
1968 signed_and_unsigned_types[precision][unsignedp] = t;
1969
1970 if (TYPE_NAME (t) == 0)
1971 {
1972 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1973 TYPE_NAME (t) = get_identifier (type_name);
1974 }
1975
1976 return t;
1977 }
1978
1979 /* Likewise for floating-point types. */
1980
1981 static tree
1982 float_type_for_size (precision, mode)
1983 int precision;
1984 enum machine_mode mode;
1985 {
1986 tree t;
1987 char type_name[20];
1988
1989 if (float_types[(int) mode] != 0)
1990 return float_types[(int) mode];
1991
1992 float_types[(int) mode] = t = make_node (REAL_TYPE);
1993 TYPE_PRECISION (t) = precision;
1994 layout_type (t);
1995
1996 if (TYPE_MODE (t) != mode)
1997 gigi_abort (414);
1998
1999 if (TYPE_NAME (t) == 0)
2000 {
2001 sprintf (type_name, "FLOAT_%d", precision);
2002 TYPE_NAME (t) = get_identifier (type_name);
2003 }
2004
2005 return t;
2006 }
2007
2008 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2009 an unsigned type; otherwise a signed type is returned. */
2010
2011 tree
2012 gnat_type_for_mode (mode, unsignedp)
2013 enum machine_mode mode;
2014 int unsignedp;
2015 {
2016 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2017 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
2018 else
2019 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2020 }
2021
2022 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2023
2024 tree
2025 unsigned_type (type_node)
2026 tree type_node;
2027 {
2028 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2029
2030 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2031 {
2032 type = copy_node (type);
2033 TREE_TYPE (type) = type_node;
2034 }
2035 else if (TREE_TYPE (type_node) != 0
2036 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2037 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2038 {
2039 type = copy_node (type);
2040 TREE_TYPE (type) = TREE_TYPE (type_node);
2041 }
2042
2043 return type;
2044 }
2045
2046 /* Return the signed version of a TYPE_NODE, a scalar type. */
2047
2048 tree
2049 signed_type (type_node)
2050 tree type_node;
2051 {
2052 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2053
2054 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2055 {
2056 type = copy_node (type);
2057 TREE_TYPE (type) = type_node;
2058 }
2059 else if (TREE_TYPE (type_node) != 0
2060 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2061 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2062 {
2063 type = copy_node (type);
2064 TREE_TYPE (type) = TREE_TYPE (type_node);
2065 }
2066
2067 return type;
2068 }
2069
2070 /* Return a type the same as TYPE except unsigned or signed according to
2071 UNSIGNEDP. */
2072
2073 tree
2074 signed_or_unsigned_type (unsignedp, type)
2075 int unsignedp;
2076 tree type;
2077 {
2078 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2079 return type;
2080 else
2081 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2082 }
2083 \f
2084 /* EXP is an expression for the size of an object. If this size contains
2085 discriminant references, replace them with the maximum (if MAX_P) or
2086 minimum (if ! MAX_P) possible value of the discriminant. */
2087
tree
max_size (exp, max_p)
     tree exp;
     int max_p;
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants contain no discriminant references;
	 return them unchanged.  */
      return exp;

    case 'x':
      /* For a TREE_LIST, process the value and the rest of the chain
	 recursively; other exceptional codes fall through to the abort
	 at the end.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we abort since it is something we can't
	 handle. */
      if (! contains_placeholder_p (exp))
	gigi_abort (406);

      /* Replace the discriminant reference with the extreme value of its
	 subtype (maximum if MAX_P, else minimum), recursing in case that
	 bound itself needs processing.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison yields 0 or 1, so its extreme values are known.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* For NEGATE_EXPR the sense of MAX_P flips: negating the
	       minimum gives the maximum and vice versa.  */
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    /* Only the value of the second operand matters.  */
	    return max_size (TREE_OPERAND (exp, 1), max_p);
	  else if (code == WITH_RECORD_EXPR)
	    return exp;

	  {
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable. */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && (TREE_OVERFLOW (lhs)
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* We cannot tell which arm will be taken, so use the larger
	       of the two possible sizes.  */
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    /* Rebuild the call with its argument list processed.  */
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }

  /* Anything not handled above is something we cannot deal with.  */
  gigi_abort (408);
}
2185 \f
2186 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2187 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2188 Return a constructor for the template. */
2189
tree
build_template (template_type, array_type, expr)
     tree template_type;
     tree array_type;
     tree expr;
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Look through a padding record or a left-justified modular wrapper to
     find the array type itself.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* The template's fields come in pairs (one per dimension), hence the
     double TREE_CHAIN step; per iteration we also advance either the
     bound list or descend one level into the array type.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range. */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      /* NOTE(review): the conversion types here look crossed — MIN is
	 converted to the type of TREE_CHAIN (field) while MAX is converted
	 to the type of FIELD, yet below MIN is paired with FIELD and MAX
	 with TREE_CHAIN (field).  Verify against the template layout.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 surround them with a WITH_RECORD_EXPR giving EXPR as the
	 OBJECT. */
      if (! TREE_CONSTANT (min) && contains_placeholder_p (min))
	min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
      if (! TREE_CONSTANT (max) && contains_placeholder_p (max))
	max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);

      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  /* The elements were accumulated in reverse, so restore field order.  */
  return build_constructor (template_type, nreverse (template_elts));
}
2254 \f
2255 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2256 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2257 in the type contains in its DECL_INITIAL the expression to use when
2258 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2259 to print out an error message if the mechanism cannot be applied to
2260 an object of that type and also for the name. */
2261
tree
build_vms_descriptor (type, mech, gnat_entity)
     tree type;
     Mechanism_Type mech;
     Entity_Id gnat_entity;
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type. */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* For Fortran-convention arrays (other than NCA descriptors) collect
     the index types back to front; otherwise front to back.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  The numeric codes below are the VMS
     descriptor DTYPE encodings for the corresponding GCC type.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TREE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TREE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TREE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TREE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TREE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* POINTER holds the address of the described object, expressed with a
     PLACEHOLDER_EXPR to be filled in at constructor time.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  /* The remaining fields depend on the descriptor variant requested.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Length of dimension I: (max - min + 1), scaled by the running
	     product TEM for NCA descriptors.  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2547
2548 /* Utility routine for above code to make a field. */
2549
2550 static tree
2551 make_descriptor_field (name, type, rec_type, initial)
2552 const char *name;
2553 tree type;
2554 tree rec_type;
2555 tree initial;
2556 {
2557 tree field
2558 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2559
2560 DECL_INITIAL (field) = initial;
2561 return field;
2562 }
2563 \f
2564 /* Build a type to be used to represent an aliased object whose nominal
2565 type is an unconstrained array. This consists of a RECORD_TYPE containing
2566 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2567 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2568 is used to represent an arbitrary unconstrained object. Use NAME
2569 as the name of the record. */
2570
2571 tree
2572 build_unc_object_type (template_type, object_type, name)
2573 tree template_type;
2574 tree object_type;
2575 tree name;
2576 {
2577 tree type = make_node (RECORD_TYPE);
2578 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2579 template_type, type, 0, 0, 0, 1);
2580 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2581 type, 0, 0, 0, 1);
2582
2583 TYPE_NAME (type) = name;
2584 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2585 finish_record_type (type,
2586 chainon (chainon (NULL_TREE, template_field),
2587 array_field),
2588 0, 0);
2589
2590 return type;
2591 }
2592 \f
2593 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2594 the normal case this is just two adjustments, but we have more to do
2595 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2596
void
update_pointer_to (old_type, new_type)
     tree old_type;
     tree new_type;
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  Otherwise, get the new type with
     the same qualifiers as the old type and see if it is the same as the old
     type. */
  if (ptr == 0 && ref == 0)
    return;

  new_type = build_qualified_type (new_type, TYPE_QUALS (old_type));
  if (old_type == new_type)
    return;

  /* First handle the simple case. */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      /* Retarget the existing pointer and reference types at NEW_TYPE.  */
      if (ptr != 0)
	TREE_TYPE (ptr) = new_type;
      TYPE_POINTER_TO (new_type) = ptr;

      if (ref != 0)
	TREE_TYPE (ref) = new_type;
      TYPE_REFERENCE_TO (new_type) = ref;

      /* Re-emit debug output for named pointer/reference types whose
	 target just changed.  */
      if (ptr != 0 && TYPE_NAME (ptr) != 0
	  && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				  global_bindings_p (), 0);
      if (ref != 0 && TYPE_NAME (ref) != 0
	  && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				  global_bindings_p (), 0);
    }

  /* Now deal with the unconstrained array case. In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context. */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Move the new fat pointer's fields into the old fat-pointer record
	 so existing users of PTR see the new layout.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point. */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point every variant of the fat pointer back at NEW_TYPE.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	TYPE_UNCONSTRAINED_ARRAY (var) = new_type;

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size. */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record size is the sum of its two fields' sizes, in both
	 bits (TYPE_SIZE) and bytes (TYPE_SIZE_UNIT).  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2710 \f
2711 /* Convert a pointer to a constrained array into a pointer to a fat
2712 pointer. This involves making or finding a template. */
2713
static tree
convert_to_fat_pointer (type, expr)
     tree type;
     tree expr;
{
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array. */
  if (integer_zerop (expr))
    return
      build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record. */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Save EXPR since it is used twice below; then get at the record it
	 points to, stripping an ADDR_EXPR rather than emitting an explicit
	 dereference when possible.  */
      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The record's first field is the template, the second the data.  */
      template = build_component_ref (expr, NULL_TREE, fields);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields)));
    }
  else
    /* Otherwise, build the constructor for the template. */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.  */
  return
    build_constructor (type,
		       tree_cons (TYPE_FIELDS (type), expr,
				  tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
					     template_addr, NULL_TREE)));
}
2766 \f
2767 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2768 is something that is a fat pointer, so convert to it first if it EXPR
2769 is not already a fat pointer. */
2770
2771 static tree
2772 convert_to_thin_pointer (type, expr)
2773 tree type;
2774 tree expr;
2775 {
2776 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2777 expr
2778 = convert_to_fat_pointer
2779 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2780
2781 /* We get the pointer to the data and use a NOP_EXPR to make it the
2782 proper GCC type. */
2783 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2784 expr = build1 (NOP_EXPR, type, expr);
2785
2786 return expr;
2787 }
2788 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.

   This is gigi's replacement for the language-independent `convert';
   in addition to the usual scalar conversions it knows about GNAT-specific
   type encodings: padding records, left-justified modular types, biased
   integer representations, fat/thin pointers, and unconstrained arrays.  */

tree
convert (type, expr)
     tree type, expr;
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
                  convert (type, TREE_OPERAND (expr, 0)),
                  TREE_OPERAND (expr, 1));

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion (fall through to
     the code below, which ends up in unchecked_convert).  */
  if (ecode == RECORD_TYPE && code == RECORD_TYPE
      && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
      && (! TREE_CONSTANT (TYPE_SIZE (type))
          || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
         the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
          && CONSTRUCTOR_ELTS (expr) != 0
          && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
        return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
        return convert (type, build_component_ref (expr, NULL_TREE,
                                                   TYPE_FIELDS (etype)));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
         of variable size, remove the conversion to avoid the need for
         variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
          && ! TREE_CONSTANT (TYPE_SIZE (type)))
        expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
         object if we have variable size.  That will avoid the need
         for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && ! TREE_CONSTANT (TYPE_SIZE (type)))
        return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
         field and the expression type is a record, do this as an
         unchecked conversion.  */
      else if (TREE_CODE (DECL_SIZE (TYPE_FIELDS (type))) != INTEGER_CST
               && contains_placeholder_p (DECL_SIZE (TYPE_FIELDS (type)))
               && TREE_CODE (etype) == RECORD_TYPE)
        return unchecked_convert (type, expr);

      /* Otherwise wrap the converted value in a one-field constructor
         for the padding record.  */
      else
        return
          build_constructor (type,
                             tree_cons (TYPE_FIELDS (type),
                                        convert (TREE_TYPE
                                                 (TYPE_FIELDS (type)),
                                                 expr),
                                        NULL_TREE));
    }

  /* If the input is a biased type, adjust first: the stored value is the
     offset from TYPE_MIN_VALUE, so add the bias back before converting.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
                                       fold (build1 (GNAT_NOP_EXPR,
                                                     TREE_TYPE (etype), expr)),
                                       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
                                               TYPE_FIELDS (etype)));

  /* If converting a type that does not contain a template into one
     that does, convert to the data type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type)
      && ! (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype)))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      return
        build_constructor
          (type,
           tree_cons (TYPE_FIELDS (type),
                      build_template (TREE_TYPE (TYPE_FIELDS (type)),
                                      obj_type, NULL_TREE),
                      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                                 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
         conversion in gnat_expand_expr.  NULL_EXPR does not represent
         an actual value, so no conversion is needed.  */
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
         just make a new one in the proper type.  Likewise for a
         CONSTRUCTOR.  But if the mode of the type is different, we must
         ensure a new RTL is made for the constant.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
          && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
                && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;

          if (TYPE_MODE (type) != TYPE_MODE (etype))
            TREE_CST_RTL (expr) = 0;

          return expr;
        }
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
         kind, size, mode, and alignment, just make a new COMPONENT_REF.
         This avoids unneeded conversions which make reference computations
         more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
          && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
          && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
          && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
        return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
                      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
         the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                             build_component_ref (TREE_OPERAND (expr, 0),
                                                  get_identifier ("P_ARRAY"),
                                                  NULL_TREE));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Collapse nested view conversions between aggregates; the fat
         pointer cases must keep their explicit conversions.  */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
          && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
        return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
         make a new INDIRECT_REF.

         ??? Disable this for now since it causes problems with the
         code in build_binary_op for MODIFY_EXPR which wants to
         strip off conversions.  But that code really is a mess and
         we need to do this a much better way some time.  */
      if (0
          && (TREE_CODE (type) == RECORD_TYPE
              || TREE_CODE (type) == UNION_TYPE)
          && (TREE_CODE (etype) == RECORD_TYPE
              || TREE_CODE (etype) == UNION_TYPE)
          && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
        return build_unary_op (INDIRECT_REF, NULL_TREE,
                               convert (build_pointer_type (type),
                                        TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* NOTE(review): `code' and `ecode' are the TREE_CODEs of *types*, which
     can never be INTEGER_CST, so the second arm of this condition looks
     unreachable — possibly INTEGER_TYPE was intended.  Left as-is; confirm
     against upstream history before changing.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
          && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
          && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE))
        return unchecked_convert (type, expr);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
        /* Converting *to* a biased type: subtract the bias so only the
           offset from TYPE_MIN_VALUE is stored.  */
        return fold (build1 (CONVERT_EXPR, type,
                             fold (build (MINUS_EXPR, TREE_TYPE (type),
                                          convert (TREE_TYPE (type), expr),
                                          TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
         both a template and type, adjust if needed to account
         for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
        {
          tree bit_diff
            = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
                           bit_position (TYPE_FIELDS (TREE_TYPE (type))));
          tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
                                       sbitsize_int (BITS_PER_UNIT));

          expr = build1 (NOP_EXPR, type, expr);
          TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
          if (integer_zerop (byte_diff))
            return expr;

          return build_binary_op (PLUS_EXPR, type, expr,
                                  fold (convert_to_pointer (type, byte_diff)));
        }

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
        return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
         array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
        expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
                                    NULL_TREE);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
        return
          build_constructor
            (type, tree_cons (TYPE_FIELDS (type),
                              convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
                              NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
         If the conversion is valid, it will be a bit-wise conversion, so
         it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
         of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
        if (TREE_TYPE (tem) == etype)
          return build1 (CONVERT_EXPR, type, expr);

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
         fat pointer, and then dereference it.  Likewise if EXPR is a
         record containing both a template and a constrained array.
         Note that a record representing a left justified modular type
         always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
          || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
          || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
          || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
        return
          build_unary_op
            (INDIRECT_REF, NULL_TREE,
             convert_to_fat_pointer (TREE_TYPE (type),
                                     build_unary_op (ADDR_EXPR,
                                                     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
         array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
        return
          build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR,
                                                   NULL_TREE, expr)));
      else
        gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3120 \f
3121 /* Remove all conversions that are done in EXP. This includes converting
3122 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3123 is nonzero, always return the address of the containing object even if
3124 the address is not bit-aligned. */
3125
3126 tree
3127 remove_conversions (exp, true_address)
3128 tree exp;
3129 int true_address;
3130 {
3131 switch (TREE_CODE (exp))
3132 {
3133 case CONSTRUCTOR:
3134 if (true_address
3135 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3136 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3137 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3138 break;
3139
3140 case COMPONENT_REF:
3141 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3142 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3143 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3144 break;
3145
3146 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3147 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3148 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3149
3150 default:
3151 break;
3152 }
3153
3154 return exp;
3155 }
3156 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.
   Otherwise return EXP unchanged.  */

tree
maybe_unconstrained_array (exp)
     tree exp;
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
        {
          /* Dereference the pointer-to-array field of the fat pointer,
             propagating read-only-ness from the original reference.  */
          new
            = build_unary_op (INDIRECT_REF, NULL_TREE,
                              build_component_ref (TREE_OPERAND (exp, 0),
                                                   get_identifier ("P_ARRAY"),
                                                   NULL_TREE));
          TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
          return new;
        }

      else if (code == NULL_EXPR)
        /* Rebuild the NULL_EXPR with the type of the inner array, obtained
           from the array pointer field of the corresponding fat pointer
           record type.  */
        return build1 (NULL_EXPR,
                       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
                                             (TREE_TYPE (TREE_TYPE (exp))))),
                       TREE_OPERAND (exp, 0));

      else if (code == WITH_RECORD_EXPR
               && (TREE_OPERAND (exp, 0)
                   != (new = maybe_unconstrained_array
                       (TREE_OPERAND (exp, 0)))))
        return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
                      TREE_OPERAND (exp, 1));

      /* NOTE(review): no break here — control falls through into the
         RECORD_TYPE case.  Presumably harmless since an unconstrained array
         type is unlikely to have TYPE_CONTAINS_TEMPLATE_P set, but confirm
         whether the fallthrough is intentional.  */
    case RECORD_TYPE:
      if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
        {
          /* Extract the array part (second field, after the template),
             removing any padding record around it.  */
          new
            = build_component_ref (exp, NULL_TREE,
                                   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
          if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
              && TYPE_IS_PADDING_P (TREE_TYPE (new)))
            new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (new))), new);

          return new;
        }
      break;

    default:
      break;
    }

  return exp;
}
3215 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   Unlike convert, no representation change is implied: the bits of EXPR
   are reinterpreted as TYPE (Ada's Unchecked_Conversion).  */

tree
unchecked_convert (type, expr)
     tree type;
     tree expr;
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
                  unchecked_convert (type, TREE_OPERAND (expr, 0)),
                  TREE_OPERAND (expr, 1));

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
         && ! (TREE_CODE (type) == INTEGER_TYPE
               && TYPE_VAX_FLOATING_POINT_P (type)))
        || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
        || (TREE_CODE (type) == RECORD_TYPE
            && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
            && ! (TREE_CODE (etype) == INTEGER_TYPE
                  && TYPE_VAX_FLOATING_POINT_P (etype)))
           || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
           || (TREE_CODE (etype) == RECORD_TYPE
               && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* Strip a biased representation from the source by converting
         through an unbiased copy of its type; the bits are unchanged.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (etype))
        {
          tree ntype = copy_type (etype);

          TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
          TYPE_MAIN_VARIANT (ntype) = ntype;
          expr = build1 (GNAT_NOP_EXPR, ntype, expr);
        }

      /* Likewise convert to an unbiased copy of a biased target type,
         then reinterpret the result as the biased type below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (type))
        {
          rtype = copy_type (type);
          TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
          TYPE_MAIN_VARIANT (rtype) = rtype;
        }

      expr = convert (rtype, expr);
      if (type != rtype)
        expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
           && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
                                      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr);
      expr = build_component_ref (expr, NULL_TREE, field);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size: wrap the input in a one-field record first.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
           && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
                                     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
        = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
                             1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
           && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                           build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR, NULL_TREE,
                                                   expr)));
  else
    {
      /* General case: reinterpret the bits with a VIEW_CONVERT_EXPR,
         first peeling off any unconstrained-array wrapper.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
            && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
            && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
            && operand_equal_p (TYPE_RM_SIZE (type),
                                (TYPE_RM_SIZE (etype) != 0
                                 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
                                0))
      && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
    {
      /* Extend by shifting left then right by (mode size - RM size) bits;
         the right shift is arithmetic or logical depending on the
         signedness of BASE_TYPE, giving sign or zero extension.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
                                           TREE_UNSIGNED (type));
      tree shift_expr
        = convert (base_type,
                   size_binop (MINUS_EXPR,
                               bitsize_int
                               (GET_MODE_BITSIZE (TYPE_MODE (type))),
                               TYPE_RM_SIZE (type)));
      expr
        = convert (type,
                   build_binary_op (RSHIFT_EXPR, base_type,
                                    build_binary_op (LSHIFT_EXPR, base_type,
                                                     convert (base_type, expr),
                                                     shift_expr),
                                    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}