[multiple changes]
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41
42 #include "ada.h"
43 #include "types.h"
44 #include "atree.h"
45 #include "elists.h"
46 #include "namet.h"
47 #include "nlists.h"
48 #include "stringt.h"
49 #include "uintp.h"
50 #include "fe.h"
51 #include "sinfo.h"
52 #include "einfo.h"
53 #include "ada-tree.h"
54 #include "gigi.h"
55
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  Indexed by gnat_entity - First_Node_Id;
   allocated (zeroed) in init_gnat_to_gnu.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

struct e_stack GTY(()) {
  /* The enclosing (saved) elaboration context.  */
  struct e_stack *next;
  /* The pending-elaborations list saved for that context.  */
  tree elab_list;
};
static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
103
/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

	the global one
	one for each subprogram definition
	one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level GTY(())
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;
  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;
  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;
  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};

/* The binding level currently in effect.  */
static GTY(()) struct binding_level *current_binding_level;

/* A chain of binding_level structures awaiting reuse.  */
static GTY((deletable (""))) struct binding_level *free_binding_level;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};

/* Per-function language-specific data; Ada keeps none, but the back-end
   requires the structure to exist.  */
struct language_function GTY(())
{
  int unused;
};
148
/* Forward declarations for the static helpers defined later in this file.  */
static tree merge_sizes (tree, tree, tree, int, int);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static int value_zerop (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *,tree, tree, tree);
static int value_factor_p (tree, int);
static int potential_alignment_gap (tree, tree, tree);
159 \f
160 /* Initialize the association of GNAT nodes to GCC trees. */
161
162 void
163 init_gnat_to_gnu (void)
164 {
165 associate_gnat_to_gnu
166 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
167
168 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
169 }
170
171 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
172 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
173 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
174
175 If GNU_DECL is zero, a previous association is to be reset. */
176
177 void
178 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
179 {
180 /* Check that GNAT_ENTITY is not already defined and that it is being set
181 to something which is a decl. Raise gigi 401 if not. Usually, this
182 means GNAT_ENTITY is defined twice, but occasionally is due to some
183 Gigi problem. */
184 if (gnu_decl
185 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
186 || (! no_check && ! DECL_P (gnu_decl))))
187 gigi_abort (401);
188
189 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
190 }
191
192 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
193 Return the ..._DECL node that was associated with it. If there is no tree
194 node associated with GNAT_ENTITY, abort.
195
196 In some cases, such as delayed elaboration or expressions that need to
197 be elaborated only once, GNAT_ENTITY is really not an entity. */
198
199 tree
200 get_gnu_tree (Entity_Id gnat_entity)
201 {
202 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
203 gigi_abort (402);
204
205 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
206 }
207
208 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
209
210 int
211 present_gnu_tree (Entity_Id gnat_entity)
212 {
213 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
214 }
215
216 \f
217 /* Return non-zero if we are currently in the global binding level. */
218
219 int
220 global_bindings_p (void)
221 {
222 return (force_global != 0 || current_binding_level == global_binding_level
223 ? -1 : 0);
224 }
225
226 /* Return the list of declarations in the current level. Note that this list
227 is in reverse order (it has to be so for back-end compatibility). */
228
229 tree
230 getdecls (void)
231 {
232 return current_binding_level->names;
233 }
234
235 /* Nonzero if the current level needs to have a BLOCK made. */
236
237 int
238 kept_level_p (void)
239 {
240 return (current_binding_level->names != 0);
241 }
242
243 /* Enter a new binding level. The input parameter is ignored, but has to be
244 specified for back-end compatibility. */
245
246 void
247 pushlevel (int ignore ATTRIBUTE_UNUSED)
248 {
249 struct binding_level *newlevel = NULL;
250
251 /* Reuse a struct for this binding level, if there is one. */
252 if (free_binding_level)
253 {
254 newlevel = free_binding_level;
255 free_binding_level = free_binding_level->level_chain;
256 }
257 else
258 newlevel
259 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
260
261 *newlevel = clear_binding_level;
262
263 /* Add this level to the front of the chain (stack) of levels that are
264 active. */
265 newlevel->level_chain = current_binding_level;
266 current_binding_level = newlevel;
267 }
268
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so
   create a "block" (a BLOCK node) for the level
   to record its declarations and subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.

   Returns the BLOCK node for this level, or NULL_TREE if none was made.  */

tree
poplevel (int keep, int reverse, int functionbody)
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed for
     the binding level that we are about to exit and which is returned by this
     routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL nodes, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
	&& ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
	&& DECL_INITIAL (decl_node) != 0)
      {
	push_function_context ();
	/* ??? This is temporary.  */
	ggc_push_context ();
	output_inline_function (decl_node);
	ggc_pop_context ();
	pop_function_context ();
      }

  /* Reuse a preallocated BLOCK if one was set via set_block; otherwise make
     one only when the caller asked for it (KEEP or FUNCTIONBODY).  */
  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as a subblock of its enclosing scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
	 don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
	{
	  if (TREE_USED (subblock_node))
	    TREE_USED (DECL_NAME (subblock_node)) = 1;
	  if (TREE_ADDRESSABLE (subblock_node))
	    TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
	}

  {
    /* Pop the current level, and free the structure for reuse.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
	 in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
	 leave them in the BLOCK because they are found in the FUNCTION_DECL
	 instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      if (!block_previously_created)
	current_binding_level->blocks
	  = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
388 \f
389 /* Insert BLOCK at the end of the list of subblocks of the
390 current binding level. This is used when a BIND_EXPR is expanded,
391 to handle the BLOCK node inside the BIND_EXPR. */
392
393 void
394 insert_block (tree block)
395 {
396 TREE_USED (block) = 1;
397 current_binding_level->blocks
398 = chainon (current_binding_level->blocks, block);
399 }
400
401 /* Set the BLOCK node for the innermost scope
402 (the one we are currently in). */
403
404 void
405 set_block (tree block)
406 {
407 current_binding_level->this_block = block;
408 current_binding_level->names = chainon (current_binding_level->names,
409 BLOCK_VARS (block));
410 current_binding_level->blocks = chainon (current_binding_level->blocks,
411 BLOCK_SUBBLOCKS (block));
412 }
413
414 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
415 Returns the ..._DECL node. */
416
417 tree
418 pushdecl (tree decl)
419 {
420 struct binding_level *b;
421
422 /* If at top level, there is no context. But PARM_DECLs always go in the
423 level of its function. */
424 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
425 {
426 b = global_binding_level;
427 DECL_CONTEXT (decl) = 0;
428 }
429 else
430 {
431 b = current_binding_level;
432 DECL_CONTEXT (decl) = current_function_decl;
433 }
434
435 /* Put the declaration on the list. The list of declarations is in reverse
436 order. The list will be reversed later if necessary. This needs to be
437 this way for compatibility with the back-end.
438
439 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
440 will cause trouble with the debugger and aren't needed anyway. */
441 if (TREE_CODE (decl) != TYPE_DECL
442 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
443 {
444 TREE_CHAIN (decl) = b->names;
445 b->names = decl;
446 }
447
448 /* For the declaration of a type, set its name if it either is not already
449 set, was set to an IDENTIFIER_NODE, indicating an internal name,
450 or if the previous type name was not derived from a source name.
451 We'd rather have the type named with a real name and all the pointer
452 types to the same object have the same POINTER_TYPE node. Code in this
453 function in c-decl.c makes a copy of the type node here, but that may
454 cause us trouble with incomplete types, so let's not try it (at least
455 for now). */
456
457 if (TREE_CODE (decl) == TYPE_DECL
458 && DECL_NAME (decl) != 0
459 && (TYPE_NAME (TREE_TYPE (decl)) == 0
460 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
461 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
462 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
463 && ! DECL_ARTIFICIAL (decl))))
464 TYPE_NAME (TREE_TYPE (decl)) = decl;
465
466 return decl;
467 }
468 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  /* NOTE(review): char_type_node is declared under the name "unsigned char";
     presumably because Ada's Character type is unsigned -- confirm.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}
506
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.
   LONG_LONG_FLOAT_TYPE and EXCEPTION_TYPE are trees supplied by the
   front end for the longest float type and the exception type.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
			    longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
			unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
			    void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  */
  /* The jump buffer is an array of 6 Pmode-sized slots (index 0 .. 5).  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
     NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
     NULL_TREE,
     build_function_type (void_type_node,
			  tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
     NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_gnat_exception"),
     NULL_TREE,
     build_function_type (build_pointer_type (except_type_node), NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	/* 14 chars of prefix + 2 digits + NUL = 17.  NOTE(review): assumes
	   fewer than 100 reason codes -- confirm LAST_REASON_CODE < 100.  */
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
     address.  */
  update_setjmp_buf_decl
    = create_subprog_decl
      (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;

  main_identifier_node = get_identifier ("main");
}
705 \f
706 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
707 nodes (FIELDLIST), finish constructing the record or union type.
708 If HAS_REP is nonzero, this record has a rep clause; don't call
709 layout_type but merely set the size and alignment ourselves.
710 If DEFER_DEBUG is nonzero, do not call the debugging routines
711 on this type; it will be done later. */
712
713 void
714 finish_record_type (tree record_type,
715 tree fieldlist,
716 int has_rep,
717 int defer_debug)
718 {
719 enum tree_code code = TREE_CODE (record_type);
720 tree ada_size = bitsize_zero_node;
721 tree size = bitsize_zero_node;
722 tree size_unit = size_zero_node;
723 int var_size = 0;
724 tree field;
725
726 TYPE_FIELDS (record_type) = fieldlist;
727
728 if (TYPE_NAME (record_type) != 0
729 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
730 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
731 else
732 TYPE_STUB_DECL (record_type)
733 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
734 record_type));
735
736 /* We don't need both the typedef name and the record name output in
737 the debugging information, since they are the same. */
738 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
739
740 /* Globally initialize the record first. If this is a rep'ed record,
741 that just means some initializations; otherwise, layout the record. */
742
743 if (has_rep)
744 {
745 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
746 TYPE_MODE (record_type) = BLKmode;
747 if (TYPE_SIZE (record_type) == 0)
748 {
749 TYPE_SIZE (record_type) = bitsize_zero_node;
750 TYPE_SIZE_UNIT (record_type) = size_zero_node;
751 }
752 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
753 out just like a UNION_TYPE, since the size will be fixed. */
754 else if (code == QUAL_UNION_TYPE)
755 code = UNION_TYPE;
756 }
757 else
758 {
759 /* Ensure there isn't a size already set. There can be in an error
760 case where there is a rep clause but all fields have errors and
761 no longer have a position. */
762 TYPE_SIZE (record_type) = 0;
763 layout_type (record_type);
764 }
765
766 /* At this point, the position and size of each field is known. It was
767 either set before entry by a rep clause, or by laying out the type above.
768
769 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
770 to compute the Ada size; the GCC size and alignment (for rep'ed records
771 that are not padding types); and the mode (for rep'ed records). We also
772 clear the DECL_BIT_FIELD indication for the cases we know have not been
773 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
774
775 if (code == QUAL_UNION_TYPE)
776 fieldlist = nreverse (fieldlist);
777
778 for (field = fieldlist; field; field = TREE_CHAIN (field))
779 {
780 tree pos = bit_position (field);
781
782 tree type = TREE_TYPE (field);
783 tree this_size = DECL_SIZE (field);
784 tree this_size_unit = DECL_SIZE_UNIT (field);
785 tree this_ada_size = DECL_SIZE (field);
786
787 /* We need to make an XVE/XVU record if any field has variable size,
788 whether or not the record does. For example, if we have an union,
789 it may be that all fields, rounded up to the alignment, have the
790 same size, in which case we'll use that size. But the debug
791 output routines (except Dwarf2) won't be able to output the fields,
792 so we need to make the special record. */
793 if (TREE_CODE (this_size) != INTEGER_CST)
794 var_size = 1;
795
796 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
797 || TREE_CODE (type) == QUAL_UNION_TYPE)
798 && ! TYPE_IS_FAT_POINTER_P (type)
799 && ! TYPE_CONTAINS_TEMPLATE_P (type)
800 && TYPE_ADA_SIZE (type) != 0)
801 this_ada_size = TYPE_ADA_SIZE (type);
802
803 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
804 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
805 && value_factor_p (pos, BITS_PER_UNIT)
806 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
807 DECL_BIT_FIELD (field) = 0;
808
809 /* If we still have DECL_BIT_FIELD set at this point, we know the field
810 is technically not addressable. Except that it can actually be
811 addressed if the field is BLKmode and happens to be properly
812 aligned. */
813 DECL_NONADDRESSABLE_P (field)
814 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
815
816 if (has_rep && ! DECL_BIT_FIELD (field))
817 TYPE_ALIGN (record_type)
818 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
819
820 switch (code)
821 {
822 case UNION_TYPE:
823 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
824 size = size_binop (MAX_EXPR, size, this_size);
825 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
826 break;
827
828 case QUAL_UNION_TYPE:
829 ada_size
830 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
831 this_ada_size, ada_size));
832 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
833 this_size, size));
834 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
835 this_size_unit, size_unit));
836 break;
837
838 case RECORD_TYPE:
839 /* Since we know here that all fields are sorted in order of
840 increasing bit position, the size of the record is one
841 higher than the ending bit of the last field processed
842 unless we have a rep clause, since in that case we might
843 have a field outside a QUAL_UNION_TYPE that has a higher ending
844 position. So use a MAX in that case. Also, if this field is a
845 QUAL_UNION_TYPE, we need to take into account the previous size in
846 the case of empty variants. */
847 ada_size
848 = merge_sizes (ada_size, pos, this_ada_size,
849 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
850 size = merge_sizes (size, pos, this_size,
851 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
852 size_unit
853 = merge_sizes (size_unit, byte_position (field), this_size_unit,
854 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
855 break;
856
857 default:
858 abort ();
859 }
860 }
861
862 if (code == QUAL_UNION_TYPE)
863 nreverse (fieldlist);
864
865 /* If this is a padding record, we never want to make the size smaller than
866 what was specified in it, if any. */
867 if (TREE_CODE (record_type) == RECORD_TYPE
868 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
869 {
870 size = TYPE_SIZE (record_type);
871 size_unit = TYPE_SIZE_UNIT (record_type);
872 }
873
874 /* Now set any of the values we've just computed that apply. */
875 if (! TYPE_IS_FAT_POINTER_P (record_type)
876 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
877 SET_TYPE_ADA_SIZE (record_type, ada_size);
878
879 if (has_rep)
880 {
881 if (! (TREE_CODE (record_type) == RECORD_TYPE
882 && TYPE_IS_PADDING_P (record_type)
883 && CONTAINS_PLACEHOLDER_P (size)))
884 {
885 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
886 TYPE_SIZE_UNIT (record_type)
887 = round_up (size_unit,
888 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
889 }
890
891 compute_record_mode (record_type);
892 }
893
894 if (! defer_debug)
895 {
896 /* If this record is of variable size, rename it so that the
897 debugger knows it is and make a new, parallel, record
898 that tells the debugger how the record is laid out. See
899 exp_dbug.ads. But don't do this for records that are padding
900 since they confuse GDB. */
901 if (var_size
902 && ! (TREE_CODE (record_type) == RECORD_TYPE
903 && TYPE_IS_PADDING_P (record_type)))
904 {
905 tree new_record_type
906 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
907 ? UNION_TYPE : TREE_CODE (record_type));
908 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
909 tree new_id
910 = concat_id_with_name (orig_id,
911 TREE_CODE (record_type) == QUAL_UNION_TYPE
912 ? "XVU" : "XVE");
913 tree last_pos = bitsize_zero_node;
914 tree old_field;
915 tree prev_old_field = 0;
916
917 TYPE_NAME (new_record_type) = new_id;
918 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
919 TYPE_STUB_DECL (new_record_type)
920 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
921 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
922 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
923 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
924 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
925
926 /* Now scan all the fields, replacing each field with a new
927 field corresponding to the new encoding. */
928 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
929 old_field = TREE_CHAIN (old_field))
930 {
931 tree field_type = TREE_TYPE (old_field);
932 tree field_name = DECL_NAME (old_field);
933 tree new_field;
934 tree curpos = bit_position (old_field);
935 int var = 0;
936 unsigned int align = 0;
937 tree pos;
938
939 /* See how the position was modified from the last position.
940
941 There are two basic cases we support: a value was added
942 to the last position or the last position was rounded to
943 a boundary and they something was added. Check for the
944 first case first. If not, see if there is any evidence
945 of rounding. If so, round the last position and try
946 again.
947
948 If this is a union, the position can be taken as zero. */
949
950 if (TREE_CODE (new_record_type) == UNION_TYPE)
951 pos = bitsize_zero_node, align = 0;
952 else
953 pos = compute_related_constant (curpos, last_pos);
954
955 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
956 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
957 {
958 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
959 pos = compute_related_constant (curpos,
960 round_up (last_pos, align));
961 }
962 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
963 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
964 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
965 && host_integerp (TREE_OPERAND
966 (TREE_OPERAND (curpos, 0), 1),
967 1))
968 {
969 align
970 = tree_low_cst
971 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
972 pos = compute_related_constant (curpos,
973 round_up (last_pos, align));
974 }
975 else if (potential_alignment_gap (prev_old_field, old_field,
976 pos))
977 {
978 align = TYPE_ALIGN (field_type);
979 pos = compute_related_constant (curpos,
980 round_up (last_pos, align));
981 }
982
983 /* If we can't compute a position, set it to zero.
984
985 ??? We really should abort here, but it's too much work
986 to get this correct for all cases. */
987
988 if (pos == 0)
989 pos = bitsize_zero_node;
990
991 /* See if this type is variable-size and make a new type
992 and indicate the indirection if so. */
993 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
994 {
995 field_type = build_pointer_type (field_type);
996 var = 1;
997 }
998
999 /* Make a new field name, if necessary. */
1000 if (var || align != 0)
1001 {
1002 char suffix[6];
1003
1004 if (align != 0)
1005 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1006 align / BITS_PER_UNIT);
1007 else
1008 strcpy (suffix, "XVL");
1009
1010 field_name = concat_id_with_name (field_name, suffix);
1011 }
1012
1013 new_field = create_field_decl (field_name, field_type,
1014 new_record_type, 0,
1015 DECL_SIZE (old_field), pos, 0);
1016 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1017 TYPE_FIELDS (new_record_type) = new_field;
1018
1019 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1020 zero. The only time it's not the last field of the record
1021 is when there are other components at fixed positions after
1022 it (meaning there was a rep clause for every field) and we
1023 want to be able to encode them. */
1024 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1025 (TREE_CODE (TREE_TYPE (old_field))
1026 == QUAL_UNION_TYPE)
1027 ? bitsize_zero_node
1028 : DECL_SIZE (old_field));
1029 prev_old_field = old_field;
1030 }
1031
1032 TYPE_FIELDS (new_record_type)
1033 = nreverse (TYPE_FIELDS (new_record_type));
1034
1035 rest_of_type_compilation (new_record_type, global_bindings_p ());
1036 }
1037
1038 rest_of_type_compilation (record_type, global_bindings_p ());
1039 }
1040 }
1041
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size,
             tree first_bit,
             tree size,
             int special,
             int has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  /* Normal case: this field ends at FIRST_BIT + SIZE.  When a rep clause
     is present an earlier field may extend beyond this one, so keep the
     larger of that end position and the size computed so far.  */
  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  /* Variant-part case: SIZE is a COND_EXPR selecting among variants.
     Distribute the merge over both arms, substituting LAST_SIZE for an
     arm whose size is zero, since an empty variant contributes nothing
     beyond what the record already holds.  */
  else
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1087
1088 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1089 related by the addition of a constant. Return that constant if so. */
1090
1091 static tree
1092 compute_related_constant (tree op0, tree op1)
1093 {
1094 tree op0_var, op1_var;
1095 tree op0_con = split_plus (op0, &op0_var);
1096 tree op1_con = split_plus (op1, &op1_var);
1097 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1098
1099 if (operand_equal_p (op0_var, op1_var, 0))
1100 return result;
1101 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1102 return result;
1103 else
1104 return 0;
1105 }
1106
1107 /* Utility function of above to split a tree OP which may be a sum, into a
1108 constant part, which is returned, and a variable part, which is stored
1109 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1110 bitsizetype. */
1111
1112 static tree
1113 split_plus (tree in, tree *pvar)
1114 {
1115 /* Strip NOPS in order to ease the tree traversal and maximize the
1116 potential for constant or plus/minus discovery. We need to be careful
1117 to always return and set *pvar to bitsizetype trees, but it's worth
1118 the effort. */
1119 STRIP_NOPS (in);
1120
1121 *pvar = convert (bitsizetype, in);
1122
1123 if (TREE_CODE (in) == INTEGER_CST)
1124 {
1125 *pvar = bitsize_zero_node;
1126 return convert (bitsizetype, in);
1127 }
1128 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1129 {
1130 tree lhs_var, rhs_var;
1131 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1132 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1133
1134 if (lhs_var == TREE_OPERAND (in, 0)
1135 && rhs_var == TREE_OPERAND (in, 1))
1136 return bitsize_zero_node;
1137
1138 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1139 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1140 }
1141 else
1142 return bitsize_zero_node;
1143 }
1144 \f
1145 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1146 subprogram. If it is void_type_node, then we are dealing with a procedure,
1147 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1148 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1149 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1150 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1151 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1152 RETURNS_WITH_DSP is nonzero if the function is to return with a
1153 depressed stack pointer. */
1154
1155 tree
1156 create_subprog_type (tree return_type,
1157 tree param_decl_list,
1158 tree cico_list,
1159 int returns_unconstrained,
1160 int returns_by_ref,
1161 int returns_with_dsp)
1162 {
1163 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1164 the subprogram formal parameters. This list is generated by traversing the
1165 input list of PARM_DECL nodes. */
1166 tree param_type_list = NULL;
1167 tree param_decl;
1168 tree type;
1169
1170 for (param_decl = param_decl_list; param_decl;
1171 param_decl = TREE_CHAIN (param_decl))
1172 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1173 param_type_list);
1174
1175 /* The list of the function parameter types has to be terminated by the void
1176 type to signal to the back-end that we are not dealing with a variable
1177 parameter subprogram, but that the subprogram has a fixed number of
1178 parameters. */
1179 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1180
1181 /* The list of argument types has been created in reverse
1182 so nreverse it. */
1183 param_type_list = nreverse (param_type_list);
1184
1185 type = build_function_type (return_type, param_type_list);
1186
1187 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1188 or the new type should, make a copy of TYPE. Likewise for
1189 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1190 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1191 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1192 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1193 type = copy_type (type);
1194
1195 SET_TYPE_CI_CO_LIST (type, cico_list);
1196 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1197 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1198 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1199 return type;
1200 }
1201 \f
1202 /* Return a copy of TYPE but safe to modify in any way. */
1203
1204 tree
1205 copy_type (tree type)
1206 {
1207 tree new = copy_node (type);
1208
1209 /* copy_node clears this field instead of copying it, because it is
1210 aliased with TREE_CHAIN. */
1211 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1212
1213 TYPE_POINTER_TO (new) = 0;
1214 TYPE_REFERENCE_TO (new) = 0;
1215 TYPE_MAIN_VARIANT (new) = new;
1216 TYPE_NEXT_VARIANT (new) = 0;
1217
1218 return new;
1219 }
1220 \f
1221 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1222 TYPE_INDEX_TYPE is INDEX. */
1223
1224 tree
1225 create_index_type (tree min, tree max, tree index)
1226 {
1227 /* First build a type for the desired range. */
1228 tree type = build_index_2_type (min, max);
1229
1230 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1231 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1232 is set, but not to INDEX, make a copy of this type with the requested
1233 index type. Note that we have no way of sharing these types, but that's
1234 only a small hole. */
1235 if (TYPE_INDEX_TYPE (type) == index)
1236 return type;
1237 else if (TYPE_INDEX_TYPE (type) != 0)
1238 type = copy_type (type);
1239
1240 SET_TYPE_INDEX_TYPE (type, index);
1241 return type;
1242 }
1243 \f
1244 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1245 string) and TYPE is a ..._TYPE node giving its data type.
1246 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1247 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1248 information about this type. */
1249
1250 tree
1251 create_type_decl (tree type_name,
1252 tree type,
1253 struct attrib *attr_list,
1254 int artificial_p,
1255 int debug_info_p)
1256 {
1257 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1258 enum tree_code code = TREE_CODE (type);
1259
1260 DECL_ARTIFICIAL (type_decl) = artificial_p;
1261 pushdecl (type_decl);
1262 process_attributes (type_decl, attr_list);
1263
1264 /* Pass type declaration information to the debugger unless this is an
1265 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1266 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1267 a dummy type, which will be completed later, or a type for which
1268 debugging information was not requested. */
1269 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1270 || ! debug_info_p)
1271 DECL_IGNORED_P (type_decl) = 1;
1272 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1273 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1274 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1275 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1276
1277 return type_decl;
1278 }
1279
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   ATTR_LIST is the chain of machine attributes to apply to the decl.
   May return a CONST_DECL instead of a VAR_DECL for small constant
   objects with constant initializers.  */

tree
create_var_decl (tree var_name,
                 tree asm_name,
                 tree type,
                 tree var_init,
                 int const_flag,
                 int public_flag,
                 int extern_flag,
                 int static_flag,
                 struct attrib *attr_list)
{
  /* Nonzero if VAR_INIT is a usable constant initializer: its type must
     agree with TYPE, and for static storage it must be something the
     assembler can emit, while for automatic storage TREE_CONSTANT
     suffices.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small"  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* Initialization to perform with an explicit assignment statement
     rather than via DECL_INITIAL; see below.  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustents.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* A non-constant initializer of a global must run from the unit's
     elaboration routine instead of being attached to the decl.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  /* A type mismatch, or a non-constant initializer for a static local,
     is handled by an explicit assignment after the decl is expanded.  */
  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON   (var_decl) = !flag_no_common;
  DECL_INITIAL  (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC   (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1401 \f
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name,
                   tree field_type,
                   tree record_type,
                   int packed,
                   tree size,
                   tree pos,
                   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && ! addressable
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* With no explicit position and no packing, the bitfield may still
	 be aligned on its type's boundary, capped by the record's own
	 alignment when one is set.  */
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = ! addressable;

  return field_decl;
}
1539
1540 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1541 effects, has the value of zero. */
1542
1543 static int
1544 value_zerop (tree exp)
1545 {
1546 if (TREE_CODE (exp) == COMPOUND_EXPR)
1547 return value_zerop (TREE_OPERAND (exp, 1));
1548
1549 return integer_zerop (exp);
1550 }
1551 \f
1552 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1553 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1554 readonly (either an IN parameter or an address of a pass-by-ref
1555 parameter). */
1556
1557 tree
1558 create_param_decl (tree param_name, tree param_type, int readonly)
1559 {
1560 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1561
1562 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1563 lead to various ABI violations. */
1564 if (targetm.calls.promote_prototypes (param_type)
1565 && (TREE_CODE (param_type) == INTEGER_TYPE
1566 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1567 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1568 {
1569 /* We have to be careful about biased types here. Make a subtype
1570 of integer_type_node with the proper biasing. */
1571 if (TREE_CODE (param_type) == INTEGER_TYPE
1572 && TYPE_BIASED_REPRESENTATION_P (param_type))
1573 {
1574 param_type
1575 = copy_type (build_range_type (integer_type_node,
1576 TYPE_MIN_VALUE (param_type),
1577 TYPE_MAX_VALUE (param_type)));
1578
1579 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1580 }
1581 else
1582 param_type = integer_type_node;
1583 }
1584
1585 DECL_ARG_TYPE (param_decl) = param_type;
1586 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1587 TREE_READONLY (param_decl) = readonly;
1588 return param_decl;
1589 }
1590 \f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Forward a machine attribute to the generic attribute
	   machinery, applying it in place on the decl's type.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* Emit the decl as an assembler alias for the given name; this
	   requires the decl to have static storage.  */
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	/* Weak symbols are only honored on targets that support them;
	   otherwise warn at the attribute's source location.  */
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	/* Place the decl in a named section when the target supports
	   named sections; a decl with an explicit section cannot be
	   a common block.  */
	if (targetm.have_named_sections)
	  {
	    DECL_SECTION_NAME (decl)
	      = build_string (IDENTIFIER_LENGTH (attr_list->name),
			      IDENTIFIER_POINTER (attr_list->name));
	    DECL_COMMON (decl) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr_list->error_point);
	break;
      }
}
1632 \f
1633 /* Add some pending elaborations on the list. */
1634
1635 void
1636 add_pending_elaborations (tree var_decl, tree var_init)
1637 {
1638 if (var_init != 0)
1639 Check_Elaboration_Code_Allowed (error_gnat_node);
1640
1641 pending_elaborations
1642 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1643 }
1644
1645 /* Obtain any pending elaborations and clear the old list. */
1646
1647 tree
1648 get_pending_elaborations (void)
1649 {
1650 /* Each thing added to the list went on the end; we want it on the
1651 beginning. */
1652 tree result = TREE_CHAIN (pending_elaborations);
1653
1654 TREE_CHAIN (pending_elaborations) = 0;
1655 return result;
1656 }
1657
1658 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1659 of 2. */
1660
1661 static int
1662 value_factor_p (tree value, int factor)
1663 {
1664 if (host_integerp (value, 1))
1665 return tree_low_cst (value, 1) % factor == 0;
1666
1667 if (TREE_CODE (value) == MULT_EXPR)
1668 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1669 || value_factor_p (TREE_OPERAND (value, 1), factor));
1670
1671 return 0;
1672 }
1673
1674 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1675 unless we can prove these 2 fields are laid out in such a way that no gap
1676 exist between the end of PREV_FIELD and the begining of CURR_FIELD. OFFSET
1677 is the distance in bits between the end of PREV_FIELD and the starting
1678 position of CURR_FIELD. It is ignored if null. */
1679
1680 static int
1681 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1682 {
1683 /* If this is the first field of the record, there cannot be any gap */
1684 if (!prev_field)
1685 return 0;
1686
1687 /* If the previous field is a union type, then return False: The only
1688 time when such a field is not the last field of the record is when
1689 there are other components at fixed positions after it (meaning there
1690 was a rep clause for every field), in which case we don't want the
1691 alignment constraint to override them. */
1692 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1693 return 0;
1694
1695 /* If the distance between the end of prev_field and the begining of
1696 curr_field is constant, then there is a gap if the value of this
1697 constant is not null. */
1698 if (offset && host_integerp (offset, 1))
1699 return (!integer_zerop (offset));
1700
1701 /* If the size and position of the previous field are constant,
1702 then check the sum of this size and position. There will be a gap
1703 iff it is not multiple of the current field alignment. */
1704 if (host_integerp (DECL_SIZE (prev_field), 1)
1705 && host_integerp (bit_position (prev_field), 1))
1706 return ((tree_low_cst (bit_position (prev_field), 1)
1707 + tree_low_cst (DECL_SIZE (prev_field), 1))
1708 % DECL_ALIGN (curr_field) != 0);
1709
1710 /* If both the position and size of the previous field are multiples
1711 of the current field alignment, there can not be any gap. */
1712 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1713 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1714 return 0;
1715
1716 /* Fallback, return that there may be a potential gap */
1717 return 1;
1718 }
1719
1720 /* Return nonzero if there are pending elaborations. */
1721
1722 int
1723 pending_elaborations_p (void)
1724 {
1725 return TREE_CHAIN (pending_elaborations) != 0;
1726 }
1727
1728 /* Save a copy of the current pending elaboration list and make a new
1729 one. */
1730
1731 void
1732 push_pending_elaborations (void)
1733 {
1734 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1735
1736 p->next = elist_stack;
1737 p->elab_list = pending_elaborations;
1738 elist_stack = p;
1739 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1740 }
1741
1742 /* Pop the stack of pending elaborations. */
1743
1744 void
1745 pop_pending_elaborations (void)
1746 {
1747 struct e_stack *p = elist_stack;
1748
1749 pending_elaborations = p->elab_list;
1750 elist_stack = p->next;
1751 }
1752
1753 /* Return the current position in pending_elaborations so we can insert
1754 elaborations after that point. */
1755
1756 tree
1757 get_elaboration_location (void)
1758 {
1759 return tree_last (pending_elaborations);
1760 }
1761
/* Insert the current elaborations after ELAB, which is in some elaboration
   list.  */

void
insert_elaboration_list (tree elab)
{
  /* Remember what currently follows ELAB so it can be reattached after
     the spliced-in entries.  */
  tree next = TREE_CHAIN (elab);

  /* Nothing to do when the pending list holds only its dummy head.  */
  if (TREE_CHAIN (pending_elaborations))
    {
      /* Link the pending entries (everything past the dummy head) in
	 right after ELAB, hook ELAB's old successors onto the last of
	 them, and leave the pending list empty again.  */
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
}
1777
1778 /* Returns a LABEL_DECL node for LABEL_NAME. */
1779
1780 tree
1781 create_label_decl (tree label_name)
1782 {
1783 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1784
1785 DECL_CONTEXT (label_decl) = current_function_decl;
1786 DECL_MODE (label_decl) = VOIDmode;
1787 DECL_SOURCE_LOCATION (label_decl) = input_location;
1788
1789 return label_decl;
1790 }
1791 \f
1792 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1793 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1794 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1795 PARM_DECL nodes chained through the TREE_CHAIN field).
1796
1797 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1798 appropriate fields in the FUNCTION_DECL. */
1799
1800 tree
1801 create_subprog_decl (tree subprog_name,
1802 tree asm_name,
1803 tree subprog_type,
1804 tree param_decl_list,
1805 int inline_flag,
1806 int public_flag,
1807 int extern_flag,
1808 struct attrib *attr_list)
1809 {
1810 tree return_type = TREE_TYPE (subprog_type);
1811 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1812
1813 /* If this is a function nested inside an inlined external function, it
1814 means we aren't going to compile the outer function unless it is
1815 actually inlined, so do the same for us. */
1816 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1817 && DECL_EXTERNAL (current_function_decl))
1818 extern_flag = 1;
1819
1820 DECL_EXTERNAL (subprog_decl) = extern_flag;
1821 TREE_PUBLIC (subprog_decl) = public_flag;
1822 DECL_INLINE (subprog_decl) = inline_flag;
1823 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1824 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1825 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1826 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1827 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1828
1829 if (asm_name != 0)
1830 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1831
1832 process_attributes (subprog_decl, attr_list);
1833
1834 /* Add this decl to the current binding level. */
1835 subprog_decl = pushdecl (subprog_decl);
1836
1837 /* Output the assembler code and/or RTL for the declaration. */
1838 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1839
1840 return subprog_decl;
1841 }
1842 \f
1843 /* Count how deep we are into nested functions. This is because
1844 we shouldn't call the backend function context routines unless we
1845 are in a nested function. */
1846
1847 static int function_nesting_depth;
1848
1849 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1850 body. This routine needs to be invoked before processing the declarations
1851 appearing in the subprogram. */
1852
1853 void
1854 begin_subprog_body (tree subprog_decl)
1855 {
1856 tree param_decl;
1857
     /* Nonzero depth means SUBPROG_DECL is nested inside another
        subprogram whose compilation state must be saved first.  */
1858 if (function_nesting_depth++ != 0)
1859 push_function_context ();
1860
1861 announce_function (subprog_decl);
1862
1863 /* Make this field nonzero so further routines know that this is not
1864 tentative. error_mark_node is replaced below (in poplevel) with the
1865 adequate BLOCK. */
1866 DECL_INITIAL (subprog_decl) = error_mark_node;
1867
1868 /* This function exists in static storage. This does not mean `static' in
1869 the C sense! */
1870 TREE_STATIC (subprog_decl) = 1;
1871
1872 /* Enter a new binding level and show that all the parameters belong to
1873 this function. */
1874 current_function_decl = subprog_decl;
1875 pushlevel (0);
1876
1877 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1878 param_decl = TREE_CHAIN (param_decl))
1879 DECL_CONTEXT (param_decl) = subprog_decl;
1880
     /* Start RTL generation for the body; the matching teardown is done
        in end_subprog_body.  */
1881 init_function_start (subprog_decl);
1882 expand_function_start (subprog_decl, 0);
1883
1884 /* If this function is `main', emit a call to `__main'
1885 to run global initializers, etc. */
1886 if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
1887 && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
1888 && DECL_CONTEXT (subprog_decl) == NULL_TREE)
1889 expand_main_function ();
1890 }
1891
1892 /* Finish the definition of the current subprogram and compile it all the way
1893 to assembler language output. */
1894
1895 void
1896 end_subprog_body (void)
1897 {
1898 tree decl;
1899 tree cico_list;
1900
     /* Close the binding level opened by begin_subprog_body; per that
        routine's comment, this replaces the error_mark_node DECL_INITIAL
        with the function's BLOCK.  */
1901 poplevel (1, 0, 1);
1902 BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
1903 = current_function_decl;
1904
1905 /* Mark the RESULT_DECL as being in this subprogram. */
1906 DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;
1907
1908 expand_function_end ();
1909
1910 /* If this is a nested function, push a new GC context. That will keep
1911 local variables on the stack from being collected while we're doing
1912 the compilation of this function. */
1913 if (function_nesting_depth > 1)
1914 ggc_push_context ();
1915
1916 /* If we're only annotating types, don't actually compile this
1917 function. */
1918 if (!type_annotate_only)
1919 {
1920 rest_of_compilation (current_function_decl);
1921 if (! DECL_DEFER_OUTPUT (current_function_decl))
1922 {
     /* Output was not deferred, so the per-function data can go.  */
1923 free_after_compilation (cfun);
1924 DECL_STRUCT_FUNCTION (current_function_decl) = 0;
1925 }
1926 cfun = 0;
1927 }
1928
1929 if (function_nesting_depth > 1)
1930 ggc_pop_context ();
1931
1932 /* Throw away any VAR_DECLs we made for OUT parameters; they must
1933 not be seen when we call this function and will be in
1934 unallocated memory anyway. */
1935 for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
1936 cico_list != 0; cico_list = TREE_CHAIN (cico_list))
1937 TREE_VALUE (cico_list) = 0;
1938
1939 if (DECL_STRUCT_FUNCTION (current_function_decl) == 0)
1940 {
1941 /* Throw away DECL_RTL in any PARM_DECLs unless this function
1942 was saved for inline, in which case the DECL_RTLs are in
1943 preserved memory. */
1944 for (decl = DECL_ARGUMENTS (current_function_decl);
1945 decl != 0; decl = TREE_CHAIN (decl))
1946 {
1947 SET_DECL_RTL (decl, 0);
1948 DECL_INCOMING_RTL (decl) = 0;
1949 }
1950
1951 /* Similarly, discard DECL_RTL of the return value. */
1952 SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);
1953
1954 /* But DECL_INITIAL must remain nonzero so we know this
1955 was an actual function definition unless toplev.c decided not
1956 to inline it. */
1957 if (DECL_INITIAL (current_function_decl) != 0)
1958 DECL_INITIAL (current_function_decl) = error_mark_node;
1959
1960 DECL_ARGUMENTS (current_function_decl) = 0;
1961 }
1962
1963 /* If we are not at the bottom of the function nesting stack, pop up to
1964 the containing function. Otherwise show we aren't in any function. */
1965 if (--function_nesting_depth != 0)
1966 pop_function_context ();
1967 else
1968 current_function_decl = 0;
1969 }
1970 \f
1971 /* Return a definition for a builtin function named NAME and whose data type
1972 is TYPE. TYPE should be a function type with argument types.
1973 FUNCTION_CODE tells later passes how to compile calls to this function.
1974 See tree.h for its possible values.
1975
1976 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1977 the name to be called if we can't opencode the function. If
1978 ATTRS is nonzero, use that for the function attribute list. */
1979
1980 tree
1981 builtin_function (const char *name,
1982 tree type,
1983 int function_code,
1984 enum built_in_class class,
1985 const char *library_name,
1986 tree attrs)
1987 {
1988 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1989
1990 DECL_EXTERNAL (decl) = 1;
1991 TREE_PUBLIC (decl) = 1;
1992 if (library_name)
1993 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1994
1995 pushdecl (decl);
1996 DECL_BUILT_IN_CLASS (decl) = class;
1997 DECL_FUNCTION_CODE (decl) = function_code;
1998 if (attrs)
1999 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
2000 return decl;
2001 }
2002
2003 /* Return an integer type with the number of bits of precision given by
2004 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2005 it is a signed type. */
2006
2007 tree
2008 gnat_type_for_size (unsigned precision, int unsignedp)
2009 {
2010 tree t;
2011 char type_name[20];
2012
2013 if (precision <= 2 * MAX_BITS_PER_WORD
2014 && signed_and_unsigned_types[precision][unsignedp] != 0)
2015 return signed_and_unsigned_types[precision][unsignedp];
2016
2017 if (unsignedp)
2018 t = make_unsigned_type (precision);
2019 else
2020 t = make_signed_type (precision);
2021
2022 if (precision <= 2 * MAX_BITS_PER_WORD)
2023 signed_and_unsigned_types[precision][unsignedp] = t;
2024
2025 if (TYPE_NAME (t) == 0)
2026 {
2027 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2028 TYPE_NAME (t) = get_identifier (type_name);
2029 }
2030
2031 return t;
2032 }
2033
2034 /* Likewise for floating-point types. */
2035
2036 static tree
2037 float_type_for_precision (int precision, enum machine_mode mode)
2038 {
2039 tree t;
2040 char type_name[20];
2041
2042 if (float_types[(int) mode] != 0)
2043 return float_types[(int) mode];
2044
2045 float_types[(int) mode] = t = make_node (REAL_TYPE);
2046 TYPE_PRECISION (t) = precision;
2047 layout_type (t);
2048
2049 if (TYPE_MODE (t) != mode)
2050 gigi_abort (414);
2051
2052 if (TYPE_NAME (t) == 0)
2053 {
2054 sprintf (type_name, "FLOAT_%d", precision);
2055 TYPE_NAME (t) = get_identifier (type_name);
2056 }
2057
2058 return t;
2059 }
2060
2061 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2062 an unsigned type; otherwise a signed type is returned. */
2063
2064 tree
2065 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2066 {
2067 if (mode == BLKmode)
2068 return NULL_TREE;
2069 else if (mode == VOIDmode)
2070 return void_type_node;
2071 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2072 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2073 else
2074 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2075 }
2076
2077 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2078
2079 tree
2080 gnat_unsigned_type (tree type_node)
2081 {
2082 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2083
2084 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2085 {
2086 type = copy_node (type);
2087 TREE_TYPE (type) = type_node;
2088 }
2089 else if (TREE_TYPE (type_node) != 0
2090 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2091 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2092 {
2093 type = copy_node (type);
2094 TREE_TYPE (type) = TREE_TYPE (type_node);
2095 }
2096
2097 return type;
2098 }
2099
2100 /* Return the signed version of a TYPE_NODE, a scalar type. */
2101
2102 tree
2103 gnat_signed_type (tree type_node)
2104 {
2105 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2106
2107 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2108 {
2109 type = copy_node (type);
2110 TREE_TYPE (type) = type_node;
2111 }
2112 else if (TREE_TYPE (type_node) != 0
2113 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2114 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2115 {
2116 type = copy_node (type);
2117 TREE_TYPE (type) = TREE_TYPE (type_node);
2118 }
2119
2120 return type;
2121 }
2122
2123 /* Return a type the same as TYPE except unsigned or signed according to
2124 UNSIGNEDP. */
2125
2126 tree
2127 gnat_signed_or_unsigned_type (int unsignedp, tree type)
2128 {
2129 if (! INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
2130 return type;
2131 else
2132 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2133 }
2134 \f
2135 /* EXP is an expression for the size of an object. If this size contains
2136 discriminant references, replace them with the maximum (if MAX_P) or
2137 minimum (if ! MAX_P) possible value of the discriminant. */
2138
2139 tree
2140 max_size (tree exp, int max_p)
2141 {
2142 enum tree_code code = TREE_CODE (exp);
2143 tree type = TREE_TYPE (exp);
2144
2145 switch (TREE_CODE_CLASS (code))
2146 {
     /* Declarations and constants contain no discriminant references;
        return them unchanged.  */
2147 case 'd':
2148 case 'c':
2149 return exp;
2150
     /* Exceptional class: only TREE_LIST is handled, recursing on each
        element of the list.  */
2151 case 'x':
2152 if (code == TREE_LIST)
2153 return tree_cons (TREE_PURPOSE (exp),
2154 max_size (TREE_VALUE (exp), max_p),
2155 TREE_CHAIN (exp) != 0
2156 ? max_size (TREE_CHAIN (exp), max_p) : 0);
2157 break;
2158
2159 case 'r':
2160 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2161 modify. Otherwise, we treat it like a variable. */
2162 if (! CONTAINS_PLACEHOLDER_P (exp))
2163 return exp;
2164
     /* A discriminant reference: replace it with the extreme value of
        the referenced field's type (operand 1 of the reference).  */
2165 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2166 return
2167 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2168
     /* A comparison yields 0 or 1, so those are its two extremes.  */
2169 case '<':
2170 return max_p ? size_one_node : size_zero_node;
2171
2172 case '1':
2173 case '2':
2174 case 'e':
2175 switch (TREE_CODE_LENGTH (code))
2176 {
2177 case 1:
     /* Negation flips which extreme of the operand we want; any other
        unary operator is rebuilt over the recursed operand.  */
2178 if (code == NON_LVALUE_EXPR)
2179 return max_size (TREE_OPERAND (exp, 0), max_p);
2180 else
2181 return
2182 fold (build1 (code, type,
2183 max_size (TREE_OPERAND (exp, 0),
2184 code == NEGATE_EXPR ? ! max_p : max_p)));
2185
2186 case 2:
2187 if (code == RTL_EXPR)
2188 gigi_abort (407);
2189 else if (code == COMPOUND_EXPR)
     /* Only the second operand of a COMPOUND_EXPR is its value.  */
2190 return max_size (TREE_OPERAND (exp, 1), max_p);
2191
2192 {
     /* For subtraction, the extreme of the result requires the
        opposite extreme of the subtrahend.  */
2193 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2194 tree rhs = max_size (TREE_OPERAND (exp, 1),
2195 code == MINUS_EXPR ? ! max_p : max_p);
2196
2197 /* Special-case wanting the maximum value of a MIN_EXPR.
2198 In that case, if one side overflows, return the other.
2199 sizetype is signed, but we know sizes are non-negative.
2200 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2201 overflowing or the maximum possible value and the RHS
2202 a variable. */
2203 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2204 return lhs;
2205 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2206 return rhs;
2207 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2208 && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
2209 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2210 && ! TREE_CONSTANT (rhs))
2211 return lhs;
2212 else
2213 return fold (build (code, type, lhs, rhs));
2214 }
2215
2216 case 3:
2217 if (code == SAVE_EXPR)
2218 return exp;
2219 else if (code == COND_EXPR)
     /* Either arm may be chosen, so bound by the larger of the two.  */
2220 return fold (build (MAX_EXPR, type,
2221 max_size (TREE_OPERAND (exp, 1), max_p),
2222 max_size (TREE_OPERAND (exp, 2), max_p)));
2223 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2224 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2225 max_size (TREE_OPERAND (exp, 1), max_p));
2226 }
2227 }
2228
     /* Any other tree code is unexpected in a size expression.  */
2229 gigi_abort (408);
2230 }
2231 \f
2232 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2233 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2234 Return a constructor for the template. */
2235
2236 tree
2237 build_template (tree template_type, tree array_type, tree expr)
2238 {
2239 tree template_elts = NULL_TREE;
2240 tree bound_list = NULL_TREE;
2241 tree field;
2242
2243 if (TREE_CODE (array_type) == RECORD_TYPE
2244 && (TYPE_IS_PADDING_P (array_type)
2245 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2246 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2247
2248 if (TREE_CODE (array_type) == ARRAY_TYPE
2249 || (TREE_CODE (array_type) == INTEGER_TYPE
2250 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2251 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2252
2253 /* First make the list for a CONSTRUCTOR for the template. Go down the
2254 field list of the template instead of the type chain because this
2255 array might be an Ada array of arrays and we can't tell where the
2256 nested arrays stop being the underlying object. */
2257
2258 for (field = TYPE_FIELDS (template_type); field;
2259 (bound_list != 0
2260 ? (bound_list = TREE_CHAIN (bound_list))
2261 : (array_type = TREE_TYPE (array_type))),
2262 field = TREE_CHAIN (TREE_CHAIN (field)))
2263 {
2264 tree bounds, min, max;
2265
2266 /* If we have a bound list, get the bounds from there. Likewise
2267 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2268 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2269 This will give us a maximum range. */
2270 if (bound_list != 0)
2271 bounds = TREE_VALUE (bound_list);
2272 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2273 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2274 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2275 && DECL_BY_COMPONENT_PTR_P (expr))
2276 bounds = TREE_TYPE (field);
2277 else
2278 gigi_abort (411);
2279
2280 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2281 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2282
2283 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2284 substitute it from OBJECT. */
2285 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2286 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2287
2288 template_elts = tree_cons (TREE_CHAIN (field), max,
2289 tree_cons (field, min, template_elts));
2290 }
2291
2292 return gnat_build_constructor (template_type, nreverse (template_elts));
2293 }
2294 \f
2295 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2296 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2297 in the type contains in its DECL_INITIAL the expression to use when
2298 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2299 to print out an error message if the mechanism cannot be applied to
2300 an object of that type and also for the name. */
2301
2302 tree
2303 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2304 {
2305 tree record_type = make_node (RECORD_TYPE);
2306 tree field_list = 0;
2307 int class;
2308 int dtype = 0;
2309 tree inner_type;
2310 int ndim;
2311 int i;
2312 tree *idx_arr;
2313 tree tem;
2314
2315 /* If TYPE is an unconstrained array, use the underlying array type. */
2316 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2317 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2318
2319 /* If this is an array, compute the number of dimensions in the array,
2320 get the index types, and point to the inner type. */
2321 if (TREE_CODE (type) != ARRAY_TYPE)
2322 ndim = 0;
2323 else
2324 for (ndim = 1, inner_type = type;
2325 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2326 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2327 ndim++, inner_type = TREE_TYPE (inner_type))
2328 ;
2329
2330 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2331
     /* Fortran-convention arrays (except for NCA descriptors) have their
        index types stored into IDX_ARR in reverse order.  */
2332 if (mech != By_Descriptor_NCA
2333 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2334 for (i = ndim - 1, inner_type = type;
2335 i >= 0;
2336 i--, inner_type = TREE_TYPE (inner_type))
2337 idx_arr[i] = TYPE_DOMAIN (inner_type);
2338 else
2339 for (i = 0, inner_type = type;
2340 i < ndim;
2341 i++, inner_type = TREE_TYPE (inner_type))
2342 idx_arr[i] = TYPE_DOMAIN (inner_type);
2343
2344 /* Now get the DTYPE value. */
     /* NOTE(review): the numeric DTYPE and CLASS codes below appear to be
        VMS DSC$K_DTYPE_* / DSC$K_CLASS_* values -- verify against the
        OpenVMS calling standard.  */
2345 switch (TREE_CODE (type))
2346 {
2347 case INTEGER_TYPE:
2348 case ENUMERAL_TYPE:
2349 if (TYPE_VAX_FLOATING_POINT_P (type))
2350 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2351 {
2352 case 6:
2353 dtype = 10;
2354 break;
2355 case 9:
2356 dtype = 11;
2357 break;
2358 case 15:
2359 dtype = 27;
2360 break;
2361 }
2362 else
2363 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2364 {
2365 case 8:
2366 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2367 break;
2368 case 16:
2369 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2370 break;
2371 case 32:
2372 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2373 break;
2374 case 64:
2375 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2376 break;
2377 case 128:
2378 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2379 break;
2380 }
2381 break;
2382
2383 case REAL_TYPE:
2384 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2385 break;
2386
2387 case COMPLEX_TYPE:
2388 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2389 && TYPE_VAX_FLOATING_POINT_P (type))
2390 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2391 {
2392 case 6:
2393 dtype = 12;
2394 break;
2395 case 9:
2396 dtype = 13;
2397 break;
2398 case 15:
2399 dtype = 29;
2400 }
2401 else
2402 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2403 break;
2404
2405 case ARRAY_TYPE:
2406 dtype = 14;
2407 break;
2408
2409 default:
2410 break;
2411 }
2412
2413 /* Get the CLASS value. */
2414 switch (mech)
2415 {
2416 case By_Descriptor_A:
2417 class = 4;
2418 break;
2419 case By_Descriptor_NCA:
2420 class = 10;
2421 break;
2422 case By_Descriptor_SB:
2423 class = 15;
2424 break;
2425 default:
2426 class = 1;
2427 }
2428
2429 /* Make the type for a descriptor for VMS. The first four fields
2430 are the same for all types. */
2431
2432 field_list
2433 = chainon (field_list,
2434 make_descriptor_field
2435 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2436 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2437
2438 field_list = chainon (field_list,
2439 make_descriptor_field ("DTYPE",
2440 gnat_type_for_size (8, 1),
2441 record_type, size_int (dtype)));
2442 field_list = chainon (field_list,
2443 make_descriptor_field ("CLASS",
2444 gnat_type_for_size (8, 1),
2445 record_type, size_int (class)));
2446
     /* The POINTER field's initializer takes the address of the described
        object, located later through the PLACEHOLDER_EXPR.  */
2447 field_list
2448 = chainon (field_list,
2449 make_descriptor_field ("POINTER",
2450 build_pointer_type (type),
2451 record_type,
2452 build1 (ADDR_EXPR,
2453 build_pointer_type (type),
2454 build (PLACEHOLDER_EXPR,
2455 type))));
2456
     /* The remaining fields depend on the descriptor mechanism.  */
2457 switch (mech)
2458 {
2459 case By_Descriptor:
2460 case By_Descriptor_S:
2461 break;
2462
2463 case By_Descriptor_SB:
2464 field_list
2465 = chainon (field_list,
2466 make_descriptor_field
2467 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2468 TREE_CODE (type) == ARRAY_TYPE
2469 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2470 field_list
2471 = chainon (field_list,
2472 make_descriptor_field
2473 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2474 TREE_CODE (type) == ARRAY_TYPE
2475 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2476 break;
2477
2478 case By_Descriptor_A:
2479 case By_Descriptor_NCA:
2480 field_list = chainon (field_list,
2481 make_descriptor_field ("SCALE",
2482 gnat_type_for_size (8, 1),
2483 record_type,
2484 size_zero_node));
2485
2486 field_list = chainon (field_list,
2487 make_descriptor_field ("DIGITS",
2488 gnat_type_for_size (8, 1),
2489 record_type,
2490 size_zero_node));
2491
2492 field_list
2493 = chainon (field_list,
2494 make_descriptor_field
2495 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2496 size_int (mech == By_Descriptor_NCA
2497 ? 0
2498 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2499 : (TREE_CODE (type) == ARRAY_TYPE
2500 && TYPE_CONVENTION_FORTRAN_P (type)
2501 ? 224 : 192))));
2502
2503 field_list = chainon (field_list,
2504 make_descriptor_field ("DIMCT",
2505 gnat_type_for_size (8, 1),
2506 record_type,
2507 size_int (ndim)));
2508
2509 field_list = chainon (field_list,
2510 make_descriptor_field ("ARSIZE",
2511 gnat_type_for_size (32, 1),
2512 record_type,
2513 size_in_bytes (type)));
2514
2515 /* Now build a pointer to the 0,0,0... element. */
2516 tem = build (PLACEHOLDER_EXPR, type);
2517 for (i = 0, inner_type = type; i < ndim;
2518 i++, inner_type = TREE_TYPE (inner_type))
2519 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2520 convert (TYPE_DOMAIN (inner_type), size_zero_node));
2521
2522 field_list
2523 = chainon (field_list,
2524 make_descriptor_field
2525 ("A0", build_pointer_type (inner_type), record_type,
2526 build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));
2527
2528 /* Next come the addressing coefficients. */
     /* TEM accumulates the product of the dimension lengths; it is only
        carried forward between dimensions for NCA descriptors.  */
2529 tem = size_int (1);
2530 for (i = 0; i < ndim; i++)
2531 {
2532 char fname[3];
2533 tree idx_length
2534 = size_binop (MULT_EXPR, tem,
2535 size_binop (PLUS_EXPR,
2536 size_binop (MINUS_EXPR,
2537 TYPE_MAX_VALUE (idx_arr[i]),
2538 TYPE_MIN_VALUE (idx_arr[i])),
2539 size_int (1)));
2540
2541 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2542 fname[1] = '0' + i, fname[2] = 0;
2543 field_list
2544 = chainon (field_list,
2545 make_descriptor_field (fname,
2546 gnat_type_for_size (32, 1),
2547 record_type, idx_length));
2548
2549 if (mech == By_Descriptor_NCA)
2550 tem = idx_length;
2551 }
2552
2553 /* Finally here are the bounds. */
2554 for (i = 0; i < ndim; i++)
2555 {
2556 char fname[3];
2557
2558 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2559 field_list
2560 = chainon (field_list,
2561 make_descriptor_field
2562 (fname, gnat_type_for_size (32, 1), record_type,
2563 TYPE_MIN_VALUE (idx_arr[i])));
2564
2565 fname[0] = 'U';
2566 field_list
2567 = chainon (field_list,
2568 make_descriptor_field
2569 (fname, gnat_type_for_size (32, 1), record_type,
2570 TYPE_MAX_VALUE (idx_arr[i])));
2571 }
2572 break;
2573
2574 default:
2575 post_error ("unsupported descriptor type for &", gnat_entity);
2576 }
2577
2578 finish_record_type (record_type, field_list, 0, 1);
2579 pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
2580 record_type));
2581
2582 return record_type;
2583 }
2584
2585 /* Utility routine for above code to make a field. */
2586
2587 static tree
2588 make_descriptor_field (const char *name, tree type,
2589 tree rec_type, tree initial)
2590 {
2591 tree field
2592 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2593
2594 DECL_INITIAL (field) = initial;
2595 return field;
2596 }
2597 \f
2598 /* Build a type to be used to represent an aliased object whose nominal
2599 type is an unconstrained array. This consists of a RECORD_TYPE containing
2600 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2601 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2602 is used to represent an arbitrary unconstrained object. Use NAME
2603 as the name of the record. */
2604
2605 tree
2606 build_unc_object_type (tree template_type, tree object_type, tree name)
2607 {
2608 tree type = make_node (RECORD_TYPE);
2609 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2610 template_type, type, 0, 0, 0, 1);
2611 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2612 type, 0, 0, 0, 1);
2613
2614 TYPE_NAME (type) = name;
2615 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2616 finish_record_type (type,
2617 chainon (chainon (NULL_TREE, template_field),
2618 array_field),
2619 0, 0);
2620
2621 return type;
2622 }
2623 \f
2624 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2625 the normal case this is just two adjustments, but we have more to do
2626 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2627
2628 void
2629 update_pointer_to (tree old_type, tree new_type)
2630 {
2631 tree ptr = TYPE_POINTER_TO (old_type);
2632 tree ref = TYPE_REFERENCE_TO (old_type);
2633 tree type;
2634
2635 /* If this is the main variant, process all the other variants first. */
2636 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2637 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2638 type = TYPE_NEXT_VARIANT (type))
2639 update_pointer_to (type, new_type);
2640
2641 /* If no pointer or reference, we are done. */
2642 if (ptr == 0 && ref == 0)
2643 return;
2644
2645 /* Merge the old type qualifiers in the new type.
2646
2647 Each old variant has qualifiers for specific reasons, and the new
2648 designated type as well. Each set of qualifiers represents useful
2649 information grabbed at some point, and merging the two simply unifies
2650 these inputs into the final type description.
2651
2652 Consider for instance a volatile type frozen after an access to constant
2653 type designating it. After the designated type freeze, we get here with a
2654 volatile new_type and a dummy old_type with a readonly variant, created
2655 when the access type was processed. We shall make a volatile and readonly
2656 designated type, because that's what it really is.
2657
2658 We might also get here for a non-dummy old_type variant with different
2659 qualifiers than the new_type ones, for instance in some cases of pointers
2660 to private record type elaboration (see the comments around the call to
2661 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
2662 qualifiers in thoses cases too, to avoid accidentally discarding the
2663 initial set, and will often end up with old_type == new_type then. */
2664 new_type = build_qualified_type (new_type,
2665 TYPE_QUALS (old_type)
2666 | TYPE_QUALS (new_type));
2667
2668 /* If the new type and the old one are identical, there is nothing to
2669 update. */
2670 if (old_type == new_type)
2671 return;
2672
2673 /* Otherwise, first handle the simple case. */
2674 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2675 {
2676 TYPE_POINTER_TO (new_type) = ptr;
2677 TYPE_REFERENCE_TO (new_type) = ref;
2678
2679 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
2680 {
2681 TREE_TYPE (ptr) = new_type;
2682
2683 if (TYPE_NAME (ptr) != 0
2684 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2685 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2686 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2687 global_bindings_p (), 0);
2688 }
2689
2690 for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
2691 {
2692 TREE_TYPE (ref) = new_type;
2693
2694 if (TYPE_NAME (ref) != 0
2695 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2696 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2697 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2698 global_bindings_p (), 0);
2699 }
2700 }
2701
2702 /* Now deal with the unconstrained array case. In this case the "pointer"
2703 is actually a RECORD_TYPE where the types of both fields are
2704 pointers to void. In that case, copy the field list from the
2705 old type to the new one and update the fields' context. */
2706 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2707 gigi_abort (412);
2708
2709 else
2710 {
2711 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2712 tree ptr_temp_type;
2713 tree new_ref;
2714 tree var;
2715
2716 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2717 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2718 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2719
2720 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2721 template bounds.
2722
2723 ??? This is now the only use of gnat_substitute_in_type, which
2724 is now a very "heavy" routine to do this, so it should be replaced
2725 at some point. */
2726 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2727 new_ref = build (COMPONENT_REF, ptr_temp_type,
2728 build (PLACEHOLDER_EXPR, ptr),
2729 TREE_CHAIN (TYPE_FIELDS (ptr)));
2730
2731 update_pointer_to
2732 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2733 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2734 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2735
2736 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2737 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2738
2739 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2740 = TREE_TYPE (new_type) = ptr;
2741
2742 /* Now handle updating the allocation record, what the thin pointer
2743 points to. Update all pointers from the old record into the new
2744 one, update the types of the fields, and recompute the size. */
2745
2746 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2747
2748 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2749 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2750 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2751 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2752 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2753 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2754 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2755
2756 TYPE_SIZE (new_obj_rec)
2757 = size_binop (PLUS_EXPR,
2758 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2759 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2760 TYPE_SIZE_UNIT (new_obj_rec)
2761 = size_binop (PLUS_EXPR,
2762 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2763 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2764 rest_of_type_compilation (ptr, global_bindings_p ());
2765 }
2766 }
2767 \f
2768 /* Convert a pointer to a constrained array into a pointer to a fat
2769 pointer. This involves making or finding a template. */
2770
static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is a fat pointer: its first field points to the array data and its
     second field points to the bounds template.  Fetch the template type
     from that second field.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
				        expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Save EXPR since we dereference it twice below (once for the
	 template, once for the data).  */
      expr = save_expr (expr);
      /* If EXPR is the address of an object, use the object directly;
	 otherwise build an explicit dereference of the thin pointer.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* First field of the record is the template; second is the data,
	 whose address becomes the array pointer of the fat pointer.  */
      template = build_component_ref (expr, NULL_TREE, fields, 0);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), 0));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2833 \f
/* Convert to a thin pointer type, TYPE.  The only thing we know how to convert
   is something that is a fat pointer, so convert to it first if EXPR is not
   already a fat pointer.  */
2837
2838 static tree
2839 convert_to_thin_pointer (tree type, tree expr)
2840 {
2841 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2842 expr
2843 = convert_to_fat_pointer
2844 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2845
2846 /* We get the pointer to the data and use a NOP_EXPR to make it the
2847 proper GCC type. */
2848 expr
2849 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2850 expr = build1 (NOP_EXPR, type, expr);
2851
2852 return expr;
2853 }
2854 \f
2855 /* Create an expression whose value is that of EXPR,
2856 converted to type TYPE. The TREE_TYPE of the value
2857 is always TYPE. This function implements all reasonable
2858 conversions; callers should filter out those that are
2859 not permitted by the language being compiled. */
2860
tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;
  /* If we're converting between two aggregate types that have the same main
     variant, just make a NOP_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (NOP_EXPR, type, expr);

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (! TREE_CONSTANT (TYPE_SIZE (type))
	       || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    /* Deliberately empty: skip the padded-type special cases below and
       fall through to the general code, which for RECORD_TYPE ends up
       doing an unchecked conversion.  */
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						   TYPE_FIELDS (etype), 0));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, 0);

      else
	return
	  gnat_build_constructor (type,
				  tree_cons (TYPE_FIELDS (type),
					     convert (TREE_TYPE
						      (TYPE_FIELDS (type)),
						      expr),
					     NULL_TREE));
    }

  /* If the input is a biased type, adjust first by adding the bias
     (TYPE_MIN_VALUE) back to the raw representation.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (GNAT_NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), 0));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  Likewise for
	 CONSTRUCTOR if the alias sets are the same.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	  && (TREE_CODE (expr) == STRING_CST
	      || get_alias_set (etype) == get_alias_set (type)))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoids unneeded conversions which make reference computations
	 more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
	  && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
	  && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0)
	  && get_alias_set (type) == get_alias_set (etype))
	return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
		      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  EXPR, ETYPE and ECODE are rewritten
	 so the code below works on the inner array reference.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, 0));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Strip a nested view conversion between non-fat-pointer aggregates
	 and convert the underlying object directly.  */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* NOTE(review): the second disjunct compares the *type* codes CODE and
     ECODE against the expression code INTEGER_CST, which looks suspicious
     (a type code is never INTEGER_CST) -- confirm whether INTEGER_TYPE was
     intended or whether the clause is deliberately dead.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
	  && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, 0);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Convert to the base type, then subtract the bias to get the
	   raw biased representation.  */
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, 0);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* Converting a scalar to a left-justified modular type wraps the
	 value in a one-field record.  */
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, 0);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  /* Also accept ETYPE wrapped in a left-justified modular or
	     padding record whose single field has type ETYPE.  */
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3194 \f
3195 /* Remove all conversions that are done in EXP. This includes converting
3196 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3197 is nonzero, always return the address of the containing object even if
3198 the address is not bit-aligned. */
3199
3200 tree
3201 remove_conversions (tree exp, int true_address)
3202 {
3203 switch (TREE_CODE (exp))
3204 {
3205 case CONSTRUCTOR:
3206 if (true_address
3207 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3208 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3209 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3210 break;
3211
3212 case COMPONENT_REF:
3213 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3214 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3215 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3216 break;
3217
3218 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3219 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3220 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3221
3222 default:
3223 break;
3224 }
3225
3226 return exp;
3227 }
3228 \f
3229 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3230 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3231 likewise return an expression pointing to the underlying array. */
3232
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the P_ARRAY pointer of the fat pointer that the
	     UNCONSTRAINED_ARRAY_REF designates.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, 0));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR with the underlying array type, fetched
	   through the fat pointer's P_ARRAY field type.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): no break here -- when EXP is neither an
	 UNCONSTRAINED_ARRAY_REF nor a NULL_EXPR, control falls through
	 into the RECORD_TYPE handling below, applying record-only
	 predicates to an unconstrained array type.  Presumably this path
	 is unreachable or benign; confirm the fallthrough is intended.  */

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    /* The array is the second field, after the template.  */
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	/* Likewise: return a reference to the array field only.  */
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  /* Nothing to strip: return EXP unchanged.  */
  return exp;
}
3284 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is set, truncation operations should be suppressed.  */
3287
tree
unchecked_convert (tree type, tree expr, int notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* If the input is biased, first view it as an unbiased copy of its
	 type via GNAT_NOP_EXPR so the bits are reinterpreted, not
	 adjusted by the bias.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      /* Likewise convert to an unbiased copy of a biased target type,
	 then reinterpret the result as the biased type below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size: wrap the input in a one-field record and convert that.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      /* General case: strip any template/padding wrapper from the input
	 and reinterpret the bits with a VIEW_CONVERT_EXPR.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (! notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Extend via a left shift followed by an arithmetic/logical right
	 shift in the full machine type; the shift count is the number of
	 padding bits above the RM (representation-minimal) size.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3442
3443 #include "gt-ada-utils.h"
3444 #include "gtype-ada.h"