/* Source: gcc.git -- gcc/ada/utils.c.  */
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41
42 #include "ada.h"
43 #include "types.h"
44 #include "atree.h"
45 #include "elists.h"
46 #include "namet.h"
47 #include "nlists.h"
48 #include "stringt.h"
49 #include "uintp.h"
50 #include "fe.h"
51 #include "sinfo.h"
52 #include "einfo.h"
53 #include "ada-tree.h"
54 #include "gigi.h"
55
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  Indexed by Entity_Id minus
   First_Node_Id; allocated (cleared) in init_gnat_to_gnu.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

struct e_stack GTY(()) {
  /* Enclosing (saved) stack entry.  */
  struct e_stack *next;
  /* Elaboration list saved for that outer context.  */
  tree elab_list;
};
static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

	the global one
	one for each subprogram definition
	one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level GTY(())
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;
  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;
  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;
  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};

/* The binding level currently in effect.  */
static GTY(()) struct binding_level *current_binding_level;

/* A chain of binding_level structures awaiting reuse; filled by poplevel and
   drained by pushlevel.  */
static GTY((deletable (""))) struct binding_level *free_binding_level;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};

/* Per-function language-specific data; the Ada front end keeps none.  */
struct language_function GTY(())
{
  int unused;
};

/* Forward declarations for local subprograms defined later in this file.  */
static tree merge_sizes (tree, tree, tree, int, int);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static int value_zerop (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *,tree, tree, tree);
static int value_factor_p (tree, int);
static int potential_alignment_gap (tree, tree, tree);
159 \f
160 /* Initialize the association of GNAT nodes to GCC trees. */
161
162 void
163 init_gnat_to_gnu (void)
164 {
165 associate_gnat_to_gnu
166 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
167
168 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
169 }
170
171 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
172 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
173 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
174
175 If GNU_DECL is zero, a previous association is to be reset. */
176
177 void
178 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
179 {
180 /* Check that GNAT_ENTITY is not already defined and that it is being set
181 to something which is a decl. Raise gigi 401 if not. Usually, this
182 means GNAT_ENTITY is defined twice, but occasionally is due to some
183 Gigi problem. */
184 if (gnu_decl
185 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
186 || (! no_check && ! DECL_P (gnu_decl))))
187 gigi_abort (401);
188
189 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
190 }
191
192 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
193 Return the ..._DECL node that was associated with it. If there is no tree
194 node associated with GNAT_ENTITY, abort.
195
196 In some cases, such as delayed elaboration or expressions that need to
197 be elaborated only once, GNAT_ENTITY is really not an entity. */
198
199 tree
200 get_gnu_tree (Entity_Id gnat_entity)
201 {
202 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
203 gigi_abort (402);
204
205 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
206 }
207
208 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
209
210 int
211 present_gnu_tree (Entity_Id gnat_entity)
212 {
213 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
214 }
215
216 \f
217 /* Return non-zero if we are currently in the global binding level. */
218
219 int
220 global_bindings_p (void)
221 {
222 return (force_global != 0 || current_binding_level == global_binding_level
223 ? -1 : 0);
224 }
225
226 /* Return the list of declarations in the current level. Note that this list
227 is in reverse order (it has to be so for back-end compatibility). */
228
229 tree
230 getdecls (void)
231 {
232 return current_binding_level->names;
233 }
234
235 /* Nonzero if the current level needs to have a BLOCK made. */
236
237 int
238 kept_level_p (void)
239 {
240 return (current_binding_level->names != 0);
241 }
242
243 /* Enter a new binding level. The input parameter is ignored, but has to be
244 specified for back-end compatibility. */
245
246 void
247 pushlevel (int ignore ATTRIBUTE_UNUSED)
248 {
249 struct binding_level *newlevel = NULL;
250
251 /* Reuse a struct for this binding level, if there is one. */
252 if (free_binding_level)
253 {
254 newlevel = free_binding_level;
255 free_binding_level = free_binding_level->level_chain;
256 }
257 else
258 newlevel
259 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
260
261 *newlevel = clear_binding_level;
262
263 /* Add this level to the front of the chain (stack) of levels that are
264 active. */
265 newlevel->level_chain = current_binding_level;
266 current_binding_level = newlevel;
267 }
268
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so
   create a "block" (a BLOCK node) for the level
   to record its declarations and subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.

   Returns the BLOCK node for this level, or NULL_TREE if none was made.  */

tree
poplevel (int keep, int reverse, int functionbody)
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed
     for the binding level that we are about to exit and which is returned
     by this routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL node, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
			     : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
	&& ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
	&& DECL_INITIAL (decl_node) != 0)
      {
	push_function_context ();
	/* ??? This is temporary.  */
	ggc_push_context ();
	output_inline_function (decl_node);
	ggc_pop_context ();
	pop_function_context ();
      }

  /* Reuse a preallocated BLOCK if the level has one; otherwise make one only
     if the level had declarations or is a function body.  */
  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock of its enclosing
     scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
	 don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
	{
	  if (TREE_USED (subblock_node))
	    TREE_USED (DECL_NAME (subblock_node)) = 1;
	  if (TREE_ADDRESSABLE (subblock_node))
	    TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
	}

  {
    /* Pop the current level, and free the structure for reuse (it goes onto
       free_binding_level, which pushlevel drains).  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
	 in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
	 leave them in the BLOCK because they are found in the FUNCTION_DECL
	 instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      if (!block_previously_created)
	current_binding_level->blocks
	  = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
388 \f
389 /* Insert BLOCK at the end of the list of subblocks of the
390 current binding level. This is used when a BIND_EXPR is expanded,
391 to handle the BLOCK node inside the BIND_EXPR. */
392
393 void
394 insert_block (tree block)
395 {
396 TREE_USED (block) = 1;
397 current_binding_level->blocks
398 = chainon (current_binding_level->blocks, block);
399 }
400
401 /* Set the BLOCK node for the innermost scope
402 (the one we are currently in). */
403
404 void
405 set_block (tree block)
406 {
407 current_binding_level->this_block = block;
408 current_binding_level->names = chainon (current_binding_level->names,
409 BLOCK_VARS (block));
410 current_binding_level->blocks = chainon (current_binding_level->blocks,
411 BLOCK_SUBBLOCKS (block));
412 }
413
414 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
415 Returns the ..._DECL node. */
416
417 tree
418 pushdecl (tree decl)
419 {
420 struct binding_level *b;
421
422 /* If at top level, there is no context. But PARM_DECLs always go in the
423 level of its function. */
424 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
425 {
426 b = global_binding_level;
427 DECL_CONTEXT (decl) = 0;
428 }
429 else
430 {
431 b = current_binding_level;
432 DECL_CONTEXT (decl) = current_function_decl;
433 }
434
435 /* Put the declaration on the list. The list of declarations is in reverse
436 order. The list will be reversed later if necessary. This needs to be
437 this way for compatibility with the back-end.
438
439 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
440 will cause trouble with the debugger and aren't needed anyway. */
441 if (TREE_CODE (decl) != TYPE_DECL
442 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
443 {
444 TREE_CHAIN (decl) = b->names;
445 b->names = decl;
446 }
447
448 /* For the declaration of a type, set its name if it either is not already
449 set, was set to an IDENTIFIER_NODE, indicating an internal name,
450 or if the previous type name was not derived from a source name.
451 We'd rather have the type named with a real name and all the pointer
452 types to the same object have the same POINTER_TYPE node. Code in this
453 function in c-decl.c makes a copy of the type node here, but that may
454 cause us trouble with incomplete types, so let's not try it (at least
455 for now). */
456
457 if (TREE_CODE (decl) == TYPE_DECL
458 && DECL_NAME (decl) != 0
459 && (TYPE_NAME (TREE_TYPE (decl)) == 0
460 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
461 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
462 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
463 && ! DECL_ARTIFICIAL (decl))))
464 TYPE_NAME (TREE_TYPE (decl)) = decl;
465
466 return decl;
467 }
468 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  Creates the global binding level and the most
   basic tree nodes.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  /* NOTE(review): this registers char_type_node under the name
     "unsigned char" -- presumably deliberate for debug-info naming;
     confirm before changing.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}
506
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.
   LONG_LONG_FLOAT_TYPE and EXCEPTION_TYPE come from the front end; also
   declares the runtime support routines (allocation, exception handling,
   raise checks, setjmp).  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
			    longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
			unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
			    void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       NULL_TREE,
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	/* 14 chars of "__gnat_rcheck_" + 2 digits + NUL == 17.  */
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");
}
693 \f
694 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
695 nodes (FIELDLIST), finish constructing the record or union type.
696 If HAS_REP is nonzero, this record has a rep clause; don't call
697 layout_type but merely set the size and alignment ourselves.
698 If DEFER_DEBUG is nonzero, do not call the debugging routines
699 on this type; it will be done later. */
700
701 void
702 finish_record_type (tree record_type,
703 tree fieldlist,
704 int has_rep,
705 int defer_debug)
706 {
707 enum tree_code code = TREE_CODE (record_type);
708 tree ada_size = bitsize_zero_node;
709 tree size = bitsize_zero_node;
710 tree size_unit = size_zero_node;
711 int var_size = 0;
712 tree field;
713
714 TYPE_FIELDS (record_type) = fieldlist;
715
716 if (TYPE_NAME (record_type) != 0
717 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
718 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
719 else
720 TYPE_STUB_DECL (record_type)
721 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
722 record_type));
723
724 /* We don't need both the typedef name and the record name output in
725 the debugging information, since they are the same. */
726 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
727
728 /* Globally initialize the record first. If this is a rep'ed record,
729 that just means some initializations; otherwise, layout the record. */
730
731 if (has_rep)
732 {
733 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
734 TYPE_MODE (record_type) = BLKmode;
735 if (TYPE_SIZE (record_type) == 0)
736 {
737 TYPE_SIZE (record_type) = bitsize_zero_node;
738 TYPE_SIZE_UNIT (record_type) = size_zero_node;
739 }
740 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
741 out just like a UNION_TYPE, since the size will be fixed. */
742 else if (code == QUAL_UNION_TYPE)
743 code = UNION_TYPE;
744 }
745 else
746 {
747 /* Ensure there isn't a size already set. There can be in an error
748 case where there is a rep clause but all fields have errors and
749 no longer have a position. */
750 TYPE_SIZE (record_type) = 0;
751 layout_type (record_type);
752 }
753
754 /* At this point, the position and size of each field is known. It was
755 either set before entry by a rep clause, or by laying out the type above.
756
757 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
758 to compute the Ada size; the GCC size and alignment (for rep'ed records
759 that are not padding types); and the mode (for rep'ed records). We also
760 clear the DECL_BIT_FIELD indication for the cases we know have not been
761 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
762
763 if (code == QUAL_UNION_TYPE)
764 fieldlist = nreverse (fieldlist);
765
766 for (field = fieldlist; field; field = TREE_CHAIN (field))
767 {
768 tree pos = bit_position (field);
769
770 tree type = TREE_TYPE (field);
771 tree this_size = DECL_SIZE (field);
772 tree this_size_unit = DECL_SIZE_UNIT (field);
773 tree this_ada_size = DECL_SIZE (field);
774
775 /* We need to make an XVE/XVU record if any field has variable size,
776 whether or not the record does. For example, if we have an union,
777 it may be that all fields, rounded up to the alignment, have the
778 same size, in which case we'll use that size. But the debug
779 output routines (except Dwarf2) won't be able to output the fields,
780 so we need to make the special record. */
781 if (TREE_CODE (this_size) != INTEGER_CST)
782 var_size = 1;
783
784 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
785 || TREE_CODE (type) == QUAL_UNION_TYPE)
786 && ! TYPE_IS_FAT_POINTER_P (type)
787 && ! TYPE_CONTAINS_TEMPLATE_P (type)
788 && TYPE_ADA_SIZE (type) != 0)
789 this_ada_size = TYPE_ADA_SIZE (type);
790
791 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
792 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
793 && value_factor_p (pos, BITS_PER_UNIT)
794 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
795 DECL_BIT_FIELD (field) = 0;
796
797 /* If we still have DECL_BIT_FIELD set at this point, we know the field
798 is technically not addressable. Except that it can actually be
799 addressed if the field is BLKmode and happens to be properly
800 aligned. */
801 DECL_NONADDRESSABLE_P (field)
802 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
803
804 if (has_rep && ! DECL_BIT_FIELD (field))
805 TYPE_ALIGN (record_type)
806 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
807
808 switch (code)
809 {
810 case UNION_TYPE:
811 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
812 size = size_binop (MAX_EXPR, size, this_size);
813 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
814 break;
815
816 case QUAL_UNION_TYPE:
817 ada_size
818 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
819 this_ada_size, ada_size));
820 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
821 this_size, size));
822 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
823 this_size_unit, size_unit));
824 break;
825
826 case RECORD_TYPE:
827 /* Since we know here that all fields are sorted in order of
828 increasing bit position, the size of the record is one
829 higher than the ending bit of the last field processed
830 unless we have a rep clause, since in that case we might
831 have a field outside a QUAL_UNION_TYPE that has a higher ending
832 position. So use a MAX in that case. Also, if this field is a
833 QUAL_UNION_TYPE, we need to take into account the previous size in
834 the case of empty variants. */
835 ada_size
836 = merge_sizes (ada_size, pos, this_ada_size,
837 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
838 size = merge_sizes (size, pos, this_size,
839 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
840 size_unit
841 = merge_sizes (size_unit, byte_position (field), this_size_unit,
842 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
843 break;
844
845 default:
846 abort ();
847 }
848 }
849
850 if (code == QUAL_UNION_TYPE)
851 nreverse (fieldlist);
852
853 /* If this is a padding record, we never want to make the size smaller than
854 what was specified in it, if any. */
855 if (TREE_CODE (record_type) == RECORD_TYPE
856 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
857 {
858 size = TYPE_SIZE (record_type);
859 size_unit = TYPE_SIZE_UNIT (record_type);
860 }
861
862 /* Now set any of the values we've just computed that apply. */
863 if (! TYPE_IS_FAT_POINTER_P (record_type)
864 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
865 SET_TYPE_ADA_SIZE (record_type, ada_size);
866
867 if (has_rep)
868 {
869 if (! (TREE_CODE (record_type) == RECORD_TYPE
870 && TYPE_IS_PADDING_P (record_type)
871 && CONTAINS_PLACEHOLDER_P (size)))
872 {
873 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
874 TYPE_SIZE_UNIT (record_type)
875 = round_up (size_unit,
876 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
877 }
878
879 compute_record_mode (record_type);
880 }
881
882 if (! defer_debug)
883 {
884 /* If this record is of variable size, rename it so that the
885 debugger knows it is and make a new, parallel, record
886 that tells the debugger how the record is laid out. See
887 exp_dbug.ads. But don't do this for records that are padding
888 since they confuse GDB. */
889 if (var_size
890 && ! (TREE_CODE (record_type) == RECORD_TYPE
891 && TYPE_IS_PADDING_P (record_type)))
892 {
893 tree new_record_type
894 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
895 ? UNION_TYPE : TREE_CODE (record_type));
896 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
897 tree new_id
898 = concat_id_with_name (orig_id,
899 TREE_CODE (record_type) == QUAL_UNION_TYPE
900 ? "XVU" : "XVE");
901 tree last_pos = bitsize_zero_node;
902 tree old_field;
903 tree prev_old_field = 0;
904
905 TYPE_NAME (new_record_type) = new_id;
906 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
907 TYPE_STUB_DECL (new_record_type)
908 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
909 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
910 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
911 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
912 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
913
914 /* Now scan all the fields, replacing each field with a new
915 field corresponding to the new encoding. */
916 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
917 old_field = TREE_CHAIN (old_field))
918 {
919 tree field_type = TREE_TYPE (old_field);
920 tree field_name = DECL_NAME (old_field);
921 tree new_field;
922 tree curpos = bit_position (old_field);
923 int var = 0;
924 unsigned int align = 0;
925 tree pos;
926
927 /* See how the position was modified from the last position.
928
929 There are two basic cases we support: a value was added
930 to the last position or the last position was rounded to
931 a boundary and they something was added. Check for the
932 first case first. If not, see if there is any evidence
933 of rounding. If so, round the last position and try
934 again.
935
936 If this is a union, the position can be taken as zero. */
937
938 if (TREE_CODE (new_record_type) == UNION_TYPE)
939 pos = bitsize_zero_node, align = 0;
940 else
941 pos = compute_related_constant (curpos, last_pos);
942
943 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
944 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
945 {
946 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
947 pos = compute_related_constant (curpos,
948 round_up (last_pos, align));
949 }
950 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
951 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
952 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
953 && host_integerp (TREE_OPERAND
954 (TREE_OPERAND (curpos, 0), 1),
955 1))
956 {
957 align
958 = tree_low_cst
959 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
960 pos = compute_related_constant (curpos,
961 round_up (last_pos, align));
962 }
963 else if (potential_alignment_gap (prev_old_field, old_field,
964 pos))
965 {
966 align = TYPE_ALIGN (field_type);
967 pos = compute_related_constant (curpos,
968 round_up (last_pos, align));
969 }
970
971 /* If we can't compute a position, set it to zero.
972
973 ??? We really should abort here, but it's too much work
974 to get this correct for all cases. */
975
976 if (pos == 0)
977 pos = bitsize_zero_node;
978
979 /* See if this type is variable-size and make a new type
980 and indicate the indirection if so. */
981 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
982 {
983 field_type = build_pointer_type (field_type);
984 var = 1;
985 }
986
987 /* Make a new field name, if necessary. */
988 if (var || align != 0)
989 {
990 char suffix[6];
991
992 if (align != 0)
993 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
994 align / BITS_PER_UNIT);
995 else
996 strcpy (suffix, "XVL");
997
998 field_name = concat_id_with_name (field_name, suffix);
999 }
1000
1001 new_field = create_field_decl (field_name, field_type,
1002 new_record_type, 0,
1003 DECL_SIZE (old_field), pos, 0);
1004 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1005 TYPE_FIELDS (new_record_type) = new_field;
1006
1007 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1008 zero. The only time it's not the last field of the record
1009 is when there are other components at fixed positions after
1010 it (meaning there was a rep clause for every field) and we
1011 want to be able to encode them. */
1012 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1013 (TREE_CODE (TREE_TYPE (old_field))
1014 == QUAL_UNION_TYPE)
1015 ? bitsize_zero_node
1016 : DECL_SIZE (old_field));
1017 prev_old_field = old_field;
1018 }
1019
1020 TYPE_FIELDS (new_record_type)
1021 = nreverse (TYPE_FIELDS (new_record_type));
1022
1023 rest_of_type_compilation (new_record_type, global_bindings_p ());
1024 }
1025
1026 rest_of_type_compilation (record_type, global_bindings_p ());
1027 }
1028 }
1029
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size,
             tree first_bit,
             tree size,
             int special,
             int has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  /* Normal case: the field ends at FIRST_BIT + SIZE.  With a rep clause
     fields may be placed out of order, so the running size is the MAX of
     that end point and what we had accumulated so far.  */
  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  /* QUAL_UNION_TYPE case: SIZE is a COND_EXPR selecting the size of the
     active variant.  Recurse into each arm; an arm of literal zero means
     "variant absent", in which case the old size is kept.  */
  else
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1075
1076 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1077 related by the addition of a constant. Return that constant if so. */
1078
1079 static tree
1080 compute_related_constant (tree op0, tree op1)
1081 {
1082 tree op0_var, op1_var;
1083 tree op0_con = split_plus (op0, &op0_var);
1084 tree op1_con = split_plus (op1, &op1_var);
1085 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1086
1087 if (operand_equal_p (op0_var, op1_var, 0))
1088 return result;
1089 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1090 return result;
1091 else
1092 return 0;
1093 }
1094
1095 /* Utility function of above to split a tree OP which may be a sum, into a
1096 constant part, which is returned, and a variable part, which is stored
1097 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1098 bitsizetype. */
1099
1100 static tree
1101 split_plus (tree in, tree *pvar)
1102 {
1103 /* Strip NOPS in order to ease the tree traversal and maximize the
1104 potential for constant or plus/minus discovery. We need to be careful
1105 to always return and set *pvar to bitsizetype trees, but it's worth
1106 the effort. */
1107 STRIP_NOPS (in);
1108
1109 *pvar = convert (bitsizetype, in);
1110
1111 if (TREE_CODE (in) == INTEGER_CST)
1112 {
1113 *pvar = bitsize_zero_node;
1114 return convert (bitsizetype, in);
1115 }
1116 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1117 {
1118 tree lhs_var, rhs_var;
1119 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1120 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1121
1122 if (lhs_var == TREE_OPERAND (in, 0)
1123 && rhs_var == TREE_OPERAND (in, 1))
1124 return bitsize_zero_node;
1125
1126 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1127 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1128 }
1129 else
1130 return bitsize_zero_node;
1131 }
1132 \f
1133 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1134 subprogram. If it is void_type_node, then we are dealing with a procedure,
1135 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1136 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1137 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1138 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1139 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1140 RETURNS_WITH_DSP is nonzero if the function is to return with a
1141 depressed stack pointer. */
1142
1143 tree
1144 create_subprog_type (tree return_type,
1145 tree param_decl_list,
1146 tree cico_list,
1147 int returns_unconstrained,
1148 int returns_by_ref,
1149 int returns_with_dsp)
1150 {
1151 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1152 the subprogram formal parameters. This list is generated by traversing the
1153 input list of PARM_DECL nodes. */
1154 tree param_type_list = NULL;
1155 tree param_decl;
1156 tree type;
1157
1158 for (param_decl = param_decl_list; param_decl;
1159 param_decl = TREE_CHAIN (param_decl))
1160 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1161 param_type_list);
1162
1163 /* The list of the function parameter types has to be terminated by the void
1164 type to signal to the back-end that we are not dealing with a variable
1165 parameter subprogram, but that the subprogram has a fixed number of
1166 parameters. */
1167 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1168
1169 /* The list of argument types has been created in reverse
1170 so nreverse it. */
1171 param_type_list = nreverse (param_type_list);
1172
1173 type = build_function_type (return_type, param_type_list);
1174
1175 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1176 or the new type should, make a copy of TYPE. Likewise for
1177 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1178 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1179 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1180 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1181 type = copy_type (type);
1182
1183 SET_TYPE_CI_CO_LIST (type, cico_list);
1184 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1185 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1186 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1187 return type;
1188 }
1189 \f
1190 /* Return a copy of TYPE but safe to modify in any way. */
1191
1192 tree
1193 copy_type (tree type)
1194 {
1195 tree new = copy_node (type);
1196
1197 /* copy_node clears this field instead of copying it, because it is
1198 aliased with TREE_CHAIN. */
1199 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1200
1201 TYPE_POINTER_TO (new) = 0;
1202 TYPE_REFERENCE_TO (new) = 0;
1203 TYPE_MAIN_VARIANT (new) = new;
1204 TYPE_NEXT_VARIANT (new) = 0;
1205
1206 return new;
1207 }
1208 \f
1209 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1210 TYPE_INDEX_TYPE is INDEX. */
1211
1212 tree
1213 create_index_type (tree min, tree max, tree index)
1214 {
1215 /* First build a type for the desired range. */
1216 tree type = build_index_2_type (min, max);
1217
1218 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1219 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1220 is set, but not to INDEX, make a copy of this type with the requested
1221 index type. Note that we have no way of sharing these types, but that's
1222 only a small hole. */
1223 if (TYPE_INDEX_TYPE (type) == index)
1224 return type;
1225 else if (TYPE_INDEX_TYPE (type) != 0)
1226 type = copy_type (type);
1227
1228 SET_TYPE_INDEX_TYPE (type, index);
1229 return type;
1230 }
1231 \f
1232 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1233 string) and TYPE is a ..._TYPE node giving its data type.
1234 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1235 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1236 information about this type. */
1237
1238 tree
1239 create_type_decl (tree type_name,
1240 tree type,
1241 struct attrib *attr_list,
1242 int artificial_p,
1243 int debug_info_p)
1244 {
1245 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1246 enum tree_code code = TREE_CODE (type);
1247
1248 DECL_ARTIFICIAL (type_decl) = artificial_p;
1249 pushdecl (type_decl);
1250 process_attributes (type_decl, attr_list);
1251
1252 /* Pass type declaration information to the debugger unless this is an
1253 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1254 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1255 a dummy type, which will be completed later, or a type for which
1256 debugging information was not requested. */
1257 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1258 || ! debug_info_p)
1259 DECL_IGNORED_P (type_decl) = 1;
1260 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1261 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1262 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1263 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1264
1265 return type_decl;
1266 }
1267
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.  */

tree
create_var_decl (tree var_name,
		 tree asm_name,
		 tree type,
		 tree var_init,
		 int const_flag,
		 int public_flag,
		 int extern_flag,
		 int static_flag,
		 struct attrib *attr_list)
{
  /* Nonzero if the initializer is usable as a compile-time constant for
     this declaration.  For static storage that means the initializer is
     valid for a static initializer; otherwise TREE_CONSTANT suffices.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small"  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* Initializer to be applied via a MODIFY_EXPR after the declaration,
     when it cannot go into DECL_INITIAL directly.  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* Global with a non-constant initializer: defer it to the unit's
     elaboration routine.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  /* Type mismatch or non-constant static initializer: emit it as an
     explicit assignment below instead of as DECL_INITIAL.  */
  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1389 \f
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name,
		   tree field_type,
		   tree record_type,
		   int packed,
		   tree size,
		   tree pos,
		   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && ! addressable
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      /* x & -x isolates the lowest set bit of x.  */
      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      /* Split POS into the byte offset and the within-byte bit offset
	 expected by the GCC field layout machinery.  */
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = ! addressable;

  return field_decl;
}
1527
1528 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1529 effects, has the value of zero. */
1530
1531 static int
1532 value_zerop (tree exp)
1533 {
1534 if (TREE_CODE (exp) == COMPOUND_EXPR)
1535 return value_zerop (TREE_OPERAND (exp, 1));
1536
1537 return integer_zerop (exp);
1538 }
1539 \f
1540 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1541 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1542 readonly (either an IN parameter or an address of a pass-by-ref
1543 parameter). */
1544
1545 tree
1546 create_param_decl (tree param_name, tree param_type, int readonly)
1547 {
1548 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1549
1550 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1551 lead to various ABI violations. */
1552 if (targetm.calls.promote_prototypes (param_type)
1553 && (TREE_CODE (param_type) == INTEGER_TYPE
1554 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1555 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1556 {
1557 /* We have to be careful about biased types here. Make a subtype
1558 of integer_type_node with the proper biasing. */
1559 if (TREE_CODE (param_type) == INTEGER_TYPE
1560 && TYPE_BIASED_REPRESENTATION_P (param_type))
1561 {
1562 param_type
1563 = copy_type (build_range_type (integer_type_node,
1564 TYPE_MIN_VALUE (param_type),
1565 TYPE_MAX_VALUE (param_type)));
1566
1567 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1568 }
1569 else
1570 param_type = integer_type_node;
1571 }
1572
1573 DECL_ARG_TYPE (param_decl) = param_type;
1574 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1575 TREE_READONLY (param_decl) = readonly;
1576 return param_decl;
1577 }
1578 \f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand a target-specific attribute to the common GCC attribute
	   machinery.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias needs a definition, hence TREE_STATIC.  */
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm.have_named_sections)
	  {
	    /* Place the decl in the named section; a decl with an explicit
	       section cannot be a common symbol.  */
	    DECL_SECTION_NAME (decl)
	      = build_string (IDENTIFIER_LENGTH (attr_list->name),
			      IDENTIFIER_POINTER (attr_list->name));
	    DECL_COMMON (decl) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr_list->error_point);
	break;
      }
}
1620 \f
1621 /* Add some pending elaborations on the list. */
1622
1623 void
1624 add_pending_elaborations (tree var_decl, tree var_init)
1625 {
1626 if (var_init != 0)
1627 Check_Elaboration_Code_Allowed (error_gnat_node);
1628
1629 pending_elaborations
1630 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1631 }
1632
1633 /* Obtain any pending elaborations and clear the old list. */
1634
1635 tree
1636 get_pending_elaborations (void)
1637 {
1638 /* Each thing added to the list went on the end; we want it on the
1639 beginning. */
1640 tree result = TREE_CHAIN (pending_elaborations);
1641
1642 TREE_CHAIN (pending_elaborations) = 0;
1643 return result;
1644 }
1645
1646 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1647 of 2. */
1648
1649 static int
1650 value_factor_p (tree value, int factor)
1651 {
1652 if (host_integerp (value, 1))
1653 return tree_low_cst (value, 1) % factor == 0;
1654
1655 if (TREE_CODE (value) == MULT_EXPR)
1656 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1657 || value_factor_p (TREE_OPERAND (value, 1), factor));
1658
1659 return 0;
1660 }
1661
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static int
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return 0;

  /* If the previous field is a union type, then return False: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return 0;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not zero.  */
  if (offset && host_integerp (offset, 1))
    return (!integer_zerop (offset));

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return 0;

  /* Fallback: conservatively report that there may be a potential gap */
  return 1;
}
1707
/* Return nonzero if there are pending elaborations.  The list head is a
   dummy node, so entries are present exactly when its chain is nonempty.  */

int
pending_elaborations_p (void)
{
  return TREE_CHAIN (pending_elaborations) != 0;
}
1715
1716 /* Save a copy of the current pending elaboration list and make a new
1717 one. */
1718
1719 void
1720 push_pending_elaborations (void)
1721 {
1722 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1723
1724 p->next = elist_stack;
1725 p->elab_list = pending_elaborations;
1726 elist_stack = p;
1727 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1728 }
1729
1730 /* Pop the stack of pending elaborations. */
1731
1732 void
1733 pop_pending_elaborations (void)
1734 {
1735 struct e_stack *p = elist_stack;
1736
1737 pending_elaborations = p->elab_list;
1738 elist_stack = p->next;
1739 }
1740
/* Return the current position in pending_elaborations so we can insert
   elaborations after that point.  This is the last node of the list
   (the dummy head itself when the list is empty).  */

tree
get_elaboration_location (void)
{
  return tree_last (pending_elaborations);
}
1749
1750 /* Insert the current elaborations after ELAB, which is in some elaboration
1751 list. */
1752
1753 void
1754 insert_elaboration_list (tree elab)
1755 {
1756 tree next = TREE_CHAIN (elab);
1757
1758 if (TREE_CHAIN (pending_elaborations))
1759 {
1760 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1761 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1762 TREE_CHAIN (pending_elaborations) = 0;
1763 }
1764 }
1765
1766 /* Returns a LABEL_DECL node for LABEL_NAME. */
1767
1768 tree
1769 create_label_decl (tree label_name)
1770 {
1771 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1772
1773 DECL_CONTEXT (label_decl) = current_function_decl;
1774 DECL_MODE (label_decl) = VOIDmode;
1775 DECL_SOURCE_LOCATION (label_decl) = input_location;
1776
1777 return label_decl;
1778 }
1779 \f
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  */

tree
create_subprog_decl (tree subprog_name,
		     tree asm_name,
		     tree subprog_type,
		     tree param_decl_list,
		     int inline_flag,
		     int public_flag,
		     int extern_flag,
		     struct attrib *attr_list)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = 1;

  /* Mirror the type's readonly/volatile qualities onto the decl, and give
     the function an anonymous RESULT_DECL of the return type.  */
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  DECL_INLINE (subprog_decl) = inline_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  subprog_decl = pushdecl (subprog_decl);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
}
1830 \f
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  Incremented by begin_subprog_body and
   decremented by end_subprog_body.  */

static int function_nesting_depth;
1836
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl_list;
  tree param_decl;
  tree next_param;

  /* Only save the enclosing function's backend context when entering a
     nested function (depth was already nonzero).  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of
     the subprogram body) so that they can be recognized as local variables
     in the subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason
     why we want to make sure the PARM_DECLs are stored in the correct order
     is that this list will be retrieved in a few lines with a call to
     `getdecls' to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      /* Unchain each PARM_DECL before pushing it; pushdecl rebuilds the
	 chain in the binding level.  */
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes.  They appear in the right order.  */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  /* Initialize RTL generation for the function body.  */
  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
1899
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  */

void
end_subprog_body (void)
{
  tree decl;
  tree cico_list;

  /* Close the binding level opened in begin_subprog_body; the resulting
     BLOCK replaces the error_mark_node placed in DECL_INITIAL there and
     becomes the function's outermost block.  */
  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end ();

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  /* If we're only annotating types, don't actually compile this
     function.  */
  if (!type_annotate_only)
    {
      rest_of_compilation (current_function_decl);
      if (! DECL_DEFER_OUTPUT (current_function_decl))
	{
	  /* Output was not deferred, so the per-function data can be
	     released immediately.  */
	  free_after_compilation (cfun);
	  DECL_STRUCT_FUNCTION (current_function_decl) = 0;
	}
      cfun = 0;
    }

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway.  */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  if (DECL_STRUCT_FUNCTION (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory.  */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value.  */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it.  */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1978 \f
1979 /* Return a definition for a builtin function named NAME and whose data type
1980 is TYPE. TYPE should be a function type with argument types.
1981 FUNCTION_CODE tells later passes how to compile calls to this function.
1982 See tree.h for its possible values.
1983
1984 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1985 the name to be called if we can't opencode the function. If
1986 ATTRS is nonzero, use that for the function attribute list. */
1987
1988 tree
1989 builtin_function (const char *name,
1990 tree type,
1991 int function_code,
1992 enum built_in_class class,
1993 const char *library_name,
1994 tree attrs)
1995 {
1996 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1997
1998 DECL_EXTERNAL (decl) = 1;
1999 TREE_PUBLIC (decl) = 1;
2000 if (library_name)
2001 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
2002
2003 pushdecl (decl);
2004 DECL_BUILT_IN_CLASS (decl) = class;
2005 DECL_FUNCTION_CODE (decl) = function_code;
2006 if (attrs)
2007 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
2008 return decl;
2009 }
2010
2011 /* Return an integer type with the number of bits of precision given by
2012 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2013 it is a signed type. */
2014
2015 tree
2016 gnat_type_for_size (unsigned precision, int unsignedp)
2017 {
2018 tree t;
2019 char type_name[20];
2020
2021 if (precision <= 2 * MAX_BITS_PER_WORD
2022 && signed_and_unsigned_types[precision][unsignedp] != 0)
2023 return signed_and_unsigned_types[precision][unsignedp];
2024
2025 if (unsignedp)
2026 t = make_unsigned_type (precision);
2027 else
2028 t = make_signed_type (precision);
2029
2030 if (precision <= 2 * MAX_BITS_PER_WORD)
2031 signed_and_unsigned_types[precision][unsignedp] = t;
2032
2033 if (TYPE_NAME (t) == 0)
2034 {
2035 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2036 TYPE_NAME (t) = get_identifier (type_name);
2037 }
2038
2039 return t;
2040 }
2041
2042 /* Likewise for floating-point types. */
2043
2044 static tree
2045 float_type_for_precision (int precision, enum machine_mode mode)
2046 {
2047 tree t;
2048 char type_name[20];
2049
2050 if (float_types[(int) mode] != 0)
2051 return float_types[(int) mode];
2052
2053 float_types[(int) mode] = t = make_node (REAL_TYPE);
2054 TYPE_PRECISION (t) = precision;
2055 layout_type (t);
2056
2057 if (TYPE_MODE (t) != mode)
2058 gigi_abort (414);
2059
2060 if (TYPE_NAME (t) == 0)
2061 {
2062 sprintf (type_name, "FLOAT_%d", precision);
2063 TYPE_NAME (t) = get_identifier (type_name);
2064 }
2065
2066 return t;
2067 }
2068
2069 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2070 an unsigned type; otherwise a signed type is returned. */
2071
2072 tree
2073 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2074 {
2075 if (mode == BLKmode)
2076 return NULL_TREE;
2077 else if (mode == VOIDmode)
2078 return void_type_node;
2079 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2080 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2081 else
2082 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2083 }
2084
2085 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2086
2087 tree
2088 gnat_unsigned_type (tree type_node)
2089 {
2090 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2091
2092 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2093 {
2094 type = copy_node (type);
2095 TREE_TYPE (type) = type_node;
2096 }
2097 else if (TREE_TYPE (type_node) != 0
2098 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2099 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2100 {
2101 type = copy_node (type);
2102 TREE_TYPE (type) = TREE_TYPE (type_node);
2103 }
2104
2105 return type;
2106 }
2107
2108 /* Return the signed version of a TYPE_NODE, a scalar type. */
2109
2110 tree
2111 gnat_signed_type (tree type_node)
2112 {
2113 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2114
2115 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2116 {
2117 type = copy_node (type);
2118 TREE_TYPE (type) = type_node;
2119 }
2120 else if (TREE_TYPE (type_node) != 0
2121 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2122 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2123 {
2124 type = copy_node (type);
2125 TREE_TYPE (type) = TREE_TYPE (type_node);
2126 }
2127
2128 return type;
2129 }
2130
2131 /* Return a type the same as TYPE except unsigned or signed according to
2132 UNSIGNEDP. */
2133
2134 tree
2135 gnat_signed_or_unsigned_type (int unsignedp, tree type)
2136 {
2137 if (! INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
2138 return type;
2139 else
2140 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2141 }
2142 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, int max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants contain no discriminant references:
	 return them unchanged.  */
      return exp;

    case 'x':
      /* The only exceptional node handled is TREE_LIST: recurse on the
	 value and on the rest of the chain.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (! CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* A discriminant reference: substitute the extreme value of the
	 discriminant's subtype (the type of the COMPONENT_REF field),
	 then take the maximum of that bound expression.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison yields 0 or 1; return whichever extreme is wanted.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation flips which extreme of the operand is wanted.  */
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    /* Only the second operand contributes the value.  */
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    /* For subtraction, the extreme of the result comes from the
	       opposite extreme of the subtrahend.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    /* A SAVE_EXPR's value is fixed once evaluated; leave it.  */
	    return exp;
	  else if (code == COND_EXPR)
	    /* Either arm might be chosen, so bound by the larger of the
	       two arms' sizes.  */
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    /* Rebuild the call with its argument list processed.  */
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }

  /* Any node kind not handled above is an internal error.  */
  gigi_abort (408);
}
2239 \f
2240 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2241 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2242 Return a constructor for the template. */
2243
2244 tree
2245 build_template (tree template_type, tree array_type, tree expr)
2246 {
2247 tree template_elts = NULL_TREE;
2248 tree bound_list = NULL_TREE;
2249 tree field;
2250
2251 if (TREE_CODE (array_type) == RECORD_TYPE
2252 && (TYPE_IS_PADDING_P (array_type)
2253 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2254 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2255
2256 if (TREE_CODE (array_type) == ARRAY_TYPE
2257 || (TREE_CODE (array_type) == INTEGER_TYPE
2258 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2259 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2260
2261 /* First make the list for a CONSTRUCTOR for the template. Go down the
2262 field list of the template instead of the type chain because this
2263 array might be an Ada array of arrays and we can't tell where the
2264 nested arrays stop being the underlying object. */
2265
2266 for (field = TYPE_FIELDS (template_type); field;
2267 (bound_list != 0
2268 ? (bound_list = TREE_CHAIN (bound_list))
2269 : (array_type = TREE_TYPE (array_type))),
2270 field = TREE_CHAIN (TREE_CHAIN (field)))
2271 {
2272 tree bounds, min, max;
2273
2274 /* If we have a bound list, get the bounds from there. Likewise
2275 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2276 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2277 This will give us a maximum range. */
2278 if (bound_list != 0)
2279 bounds = TREE_VALUE (bound_list);
2280 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2281 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2282 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2283 && DECL_BY_COMPONENT_PTR_P (expr))
2284 bounds = TREE_TYPE (field);
2285 else
2286 gigi_abort (411);
2287
2288 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2289 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2290
2291 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2292 substitute it from OBJECT. */
2293 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2294 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2295
2296 template_elts = tree_cons (TREE_CHAIN (field), max,
2297 tree_cons (field, min, template_elts));
2298 }
2299
2300 return gnat_build_constructor (template_type, nreverse (template_elts));
2301 }
2302 \f
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* For a Fortran-convention array (except with the NCA mechanism),
     collect the index types in reverse order.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  NOTE(review): the numeric codes below are
     presumably the OpenVMS DSC$K_DTYPE_* values — confirm against the
     OpenVMS calling standard.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* The POINTER field is initialized with the address of the object the
     descriptor describes, represented by a PLACEHOLDER_EXPR.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  /* The remaining fields depend on the mechanism.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* Single-dimension array with bounds: add the low and high bound.  */
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  Each coefficient is the
	 running product of the extents of the dimensions processed so
	 far (extent = max - min + 1).  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  /* Field names are S0,S1,... for NCA descriptors and M0,M1,...
	     otherwise.  */
	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  /* Only NCA descriptors accumulate the product across
	     dimensions.  */
	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds, a (Li, Ui) pair per dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2592
2593 /* Utility routine for above code to make a field. */
2594
2595 static tree
2596 make_descriptor_field (const char *name, tree type,
2597 tree rec_type, tree initial)
2598 {
2599 tree field
2600 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2601
2602 DECL_INITIAL (field) = initial;
2603 return field;
2604 }
2605 \f
2606 /* Build a type to be used to represent an aliased object whose nominal
2607 type is an unconstrained array. This consists of a RECORD_TYPE containing
2608 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2609 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2610 is used to represent an arbitrary unconstrained object. Use NAME
2611 as the name of the record. */
2612
2613 tree
2614 build_unc_object_type (tree template_type, tree object_type, tree name)
2615 {
2616 tree type = make_node (RECORD_TYPE);
2617 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2618 template_type, type, 0, 0, 0, 1);
2619 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2620 type, 0, 0, 0, 1);
2621
2622 TYPE_NAME (type) = name;
2623 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2624 finish_record_type (type,
2625 chainon (chainon (NULL_TREE, template_field),
2626 array_field),
2627 0, 0);
2628
2629 return type;
2630 }
2631 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (ptr == 0 && ref == 0)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it.  After the designated type freeze, we get here with
     a volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed.  We shall make a volatile and
     readonly designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type).  We have to merge
     the qualifiers in thoses cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case: redirect every pointer and
     reference node in the two chains to the new designated type.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	{
	  TREE_TYPE (ptr) = new_type;

	  /* Re-emit debug/assembly info for any named pointer type.  */
	  if (TYPE_NAME (ptr) != 0
	      && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	      && TREE_CODE (new_type) != ENUMERAL_TYPE)
	    rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				      global_bindings_p (), 0);
	}

      /* NOTE(review): the reference chain is walked with TYPE_NEXT_PTR_TO
	 rather than a reference-specific accessor — confirm this is the
	 intended chaining for REFERENCE_TYPE nodes in this tree version.  */
      for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
	{
	  TREE_TYPE (ref) = new_type;

	  if (TYPE_NAME (ref) != 0
	      && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	      && TREE_CODE (new_type) != ENUMERAL_TYPE)
	    rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				      global_bindings_p (), 0);
	}
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Graft the new fat pointer's fields onto the old fat pointer
	 record and re-parent them.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point every variant of the fat pointer back at the new
	 unconstrained array type.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record's size is the sum of its two fields' sizes.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2775 \f
/* Convert a pointer to a constrained array into a pointer to a fat
   pointer.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is the fat pointer record: its first field points to the array,
     its second field points to the bounds template, whose type we fetch
     here by stripping the pointer-to from the second field's type.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is referenced twice below (once for the template, once for
	 the data), so protect it against double evaluation.  */
      expr = save_expr (expr);
      /* Get at the record EXPR points to; when EXPR is the address of an
	 object, use that object directly instead of building a
	 gratuitous INDIRECT_REF.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The record's first field is the template, the second is the data;
	 take the template by value and the data by address.  */
      template = build_component_ref (expr, NULL_TREE, fields, 0);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), 0));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2841 \f
2842 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2843 is something that is a fat pointer, so convert to it first if it EXPR
2844 is not already a fat pointer. */
2845
2846 static tree
2847 convert_to_thin_pointer (tree type, tree expr)
2848 {
2849 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2850 expr
2851 = convert_to_fat_pointer
2852 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2853
2854 /* We get the pointer to the data and use a NOP_EXPR to make it the
2855 proper GCC type. */
2856 expr
2857 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2858 expr = build1 (NOP_EXPR, type, expr);
2859
2860 return expr;
2861 }
2862 \f
2863 /* Create an expression whose value is that of EXPR,
2864 converted to type TYPE. The TREE_TYPE of the value
2865 is always TYPE. This function implements all reasonable
2866 conversions; callers should filter out those that are
2867 not permitted by the language being compiled. */
2868
2869 tree
2870 convert (tree type, tree expr)
2871 {
2872 enum tree_code code = TREE_CODE (type);
2873 tree etype = TREE_TYPE (expr);
2874 enum tree_code ecode = TREE_CODE (etype);
2875 tree tem;
2876
2877 /* If EXPR is already the right type, we are done. */
2878 if (type == etype)
2879 return expr;
2880 /* If we're converting between two aggregate types that have the same main
2881 variant, just make a NOP_EXPR. */
2882 else if (AGGREGATE_TYPE_P (type)
2883 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
2884 return build1 (NOP_EXPR, type, expr);
2885
2886 /* If the input type has padding, remove it by doing a component reference
2887 to the field. If the output type has padding, make a constructor
2888 to build the record. If both input and output have padding and are
2889 of variable size, do this as an unchecked conversion. */
2890 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
2891 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2892 && (! TREE_CONSTANT (TYPE_SIZE (type))
2893 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2894 ;
2895 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2896 {
2897 /* If we have just converted to this padded type, just get
2898 the inner expression. */
2899 if (TREE_CODE (expr) == CONSTRUCTOR
2900 && CONSTRUCTOR_ELTS (expr) != 0
2901 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2902 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2903 else
2904 return convert (type, build_component_ref (expr, NULL_TREE,
2905 TYPE_FIELDS (etype), 0));
2906 }
2907 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2908 {
2909 /* If we previously converted from another type and our type is
2910 of variable size, remove the conversion to avoid the need for
2911 variable-size temporaries. */
2912 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2913 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2914 expr = TREE_OPERAND (expr, 0);
2915
2916 /* If we are just removing the padding from expr, convert the original
2917 object if we have variable size. That will avoid the need
2918 for some variable-size temporaries. */
2919 if (TREE_CODE (expr) == COMPONENT_REF
2920 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2921 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2922 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2923 return convert (type, TREE_OPERAND (expr, 0));
2924
2925 /* If the result type is a padded type with a self-referentially-sized
2926 field and the expression type is a record, do this as an
2927 unchecked converstion. */
2928 else if (TREE_CODE (etype) == RECORD_TYPE
2929 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
2930 return unchecked_convert (type, expr, 0);
2931
2932 else
2933 return
2934 gnat_build_constructor (type,
2935 tree_cons (TYPE_FIELDS (type),
2936 convert (TREE_TYPE
2937 (TYPE_FIELDS (type)),
2938 expr),
2939 NULL_TREE));
2940 }
2941
2942 /* If the input is a biased type, adjust first. */
2943 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2944 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2945 fold (build1 (GNAT_NOP_EXPR,
2946 TREE_TYPE (etype), expr)),
2947 TYPE_MIN_VALUE (etype))));
2948
2949 /* If the input is a left-justified modular type, we need to extract
2950 the actual object before converting it to any other type with the
2951 exception of an unconstrained array. */
2952 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2953 && code != UNCONSTRAINED_ARRAY_TYPE)
2954 return convert (type, build_component_ref (expr, NULL_TREE,
2955 TYPE_FIELDS (etype), 0));
2956
2957 /* If converting to a type that contains a template, convert to the data
2958 type and then build the template. */
2959 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
2960 {
2961 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2962
2963 /* If the source already has a template, get a reference to the
2964 associated array only, as we are going to rebuild a template
2965 for the target type anyway. */
2966 expr = maybe_unconstrained_array (expr);
2967
2968 return
2969 gnat_build_constructor
2970 (type,
2971 tree_cons (TYPE_FIELDS (type),
2972 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2973 obj_type, NULL_TREE),
2974 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2975 convert (obj_type, expr), NULL_TREE)));
2976 }
2977
2978 /* There are some special cases of expressions that we process
2979 specially. */
2980 switch (TREE_CODE (expr))
2981 {
2982 case ERROR_MARK:
2983 return expr;
2984
2985 case TRANSFORM_EXPR:
2986 case NULL_EXPR:
2987 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2988 conversion in gnat_expand_expr. NULL_EXPR does not represent
2989 and actual value, so no conversion is needed. */
2990 expr = copy_node (expr);
2991 TREE_TYPE (expr) = type;
2992 return expr;
2993
2994 case STRING_CST:
2995 case CONSTRUCTOR:
2996 /* If we are converting a STRING_CST to another constrained array type,
2997 just make a new one in the proper type. Likewise for a
2998 CONSTRUCTOR. */
2999 if (code == ecode && AGGREGATE_TYPE_P (etype)
3000 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3001 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3002 {
3003 expr = copy_node (expr);
3004 TREE_TYPE (expr) = type;
3005 return expr;
3006 }
3007 break;
3008
3009 case COMPONENT_REF:
3010 /* If we are converting between two aggregate types of the same
3011 kind, size, mode, and alignment, just make a new COMPONENT_REF.
3012 This avoid unneeded conversions which makes reference computations
3013 more complex. */
3014 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
3015 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3016 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
3017 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
3018 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
3019 TREE_OPERAND (expr, 1));
3020
3021 break;
3022
3023 case UNCONSTRAINED_ARRAY_REF:
3024 /* Convert this to the type of the inner array by getting the address of
3025 the array from the template. */
3026 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3027 build_component_ref (TREE_OPERAND (expr, 0),
3028 get_identifier ("P_ARRAY"),
3029 NULL_TREE, 0));
3030 etype = TREE_TYPE (expr);
3031 ecode = TREE_CODE (etype);
3032 break;
3033
3034 case VIEW_CONVERT_EXPR:
3035 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3036 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3037 return convert (type, TREE_OPERAND (expr, 0));
3038 break;
3039
3040 case INDIRECT_REF:
3041 /* If both types are record types, just convert the pointer and
3042 make a new INDIRECT_REF.
3043
3044 ??? Disable this for now since it causes problems with the
3045 code in build_binary_op for MODIFY_EXPR which wants to
3046 strip off conversions. But that code really is a mess and
3047 we need to do this a much better way some time. */
3048 if (0
3049 && (TREE_CODE (type) == RECORD_TYPE
3050 || TREE_CODE (type) == UNION_TYPE)
3051 && (TREE_CODE (etype) == RECORD_TYPE
3052 || TREE_CODE (etype) == UNION_TYPE)
3053 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3054 return build_unary_op (INDIRECT_REF, NULL_TREE,
3055 convert (build_pointer_type (type),
3056 TREE_OPERAND (expr, 0)));
3057 break;
3058
3059 default:
3060 break;
3061 }
3062
3063 /* Check for converting to a pointer to an unconstrained array. */
3064 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3065 return convert_to_fat_pointer (type, expr);
3066
3067 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3068 || (code == INTEGER_CST && ecode == INTEGER_CST
3069 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3070 return fold (build1 (NOP_EXPR, type, expr));
3071
3072 switch (code)
3073 {
3074 case VOID_TYPE:
3075 return build1 (CONVERT_EXPR, type, expr);
3076
3077 case INTEGER_TYPE:
3078 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
3079 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3080 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3081 return unchecked_convert (type, expr, 0);
3082 else if (TYPE_BIASED_REPRESENTATION_P (type))
3083 return fold (build1 (CONVERT_EXPR, type,
3084 fold (build (MINUS_EXPR, TREE_TYPE (type),
3085 convert (TREE_TYPE (type), expr),
3086 TYPE_MIN_VALUE (type)))));
3087
3088 /* ... fall through ... */
3089
3090 case ENUMERAL_TYPE:
3091 return fold (convert_to_integer (type, expr));
3092
3093 case POINTER_TYPE:
3094 case REFERENCE_TYPE:
3095 /* If converting between two pointers to records denoting
3096 both a template and type, adjust if needed to account
3097 for any differing offsets, since one might be negative. */
3098 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3099 {
3100 tree bit_diff
3101 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3102 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3103 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3104 sbitsize_int (BITS_PER_UNIT));
3105
3106 expr = build1 (NOP_EXPR, type, expr);
3107 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3108 if (integer_zerop (byte_diff))
3109 return expr;
3110
3111 return build_binary_op (PLUS_EXPR, type, expr,
3112 fold (convert_to_pointer (type, byte_diff)));
3113 }
3114
3115 /* If converting to a thin pointer, handle specially. */
3116 if (TYPE_THIN_POINTER_P (type)
3117 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3118 return convert_to_thin_pointer (type, expr);
3119
3120 /* If converting fat pointer to normal pointer, get the pointer to the
3121 array and then convert it. */
3122 else if (TYPE_FAT_POINTER_P (etype))
3123 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3124 NULL_TREE, 0);
3125
3126 return fold (convert_to_pointer (type, expr));
3127
3128 case REAL_TYPE:
3129 return fold (convert_to_real (type, expr));
3130
3131 case RECORD_TYPE:
3132 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3133 return
3134 gnat_build_constructor
3135 (type, tree_cons (TYPE_FIELDS (type),
3136 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3137 NULL_TREE));
3138
3139 /* ... fall through ... */
3140
3141 case ARRAY_TYPE:
3142 /* In these cases, assume the front-end has validated the conversion.
3143 If the conversion is valid, it will be a bit-wise conversion, so
3144 it can be viewed as an unchecked conversion. */
3145 return unchecked_convert (type, expr, 0);
3146
3147 case UNION_TYPE:
3148 /* Just validate that the type is indeed that of a field
3149 of the type. Then make the simple conversion. */
3150 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3151 {
3152 if (TREE_TYPE (tem) == etype)
3153 return build1 (CONVERT_EXPR, type, expr);
3154 else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
3155 && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
3156 || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
3157 && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
3158 return build1 (CONVERT_EXPR, type,
3159 convert (TREE_TYPE (tem), expr));
3160 }
3161
3162 gigi_abort (413);
3163
3164 case UNCONSTRAINED_ARRAY_TYPE:
3165 /* If EXPR is a constrained array, take its address, convert it to a
3166 fat pointer, and then dereference it. Likewise if EXPR is a
3167 record containing both a template and a constrained array.
3168 Note that a record representing a left justified modular type
3169 always represents a packed constrained array. */
3170 if (ecode == ARRAY_TYPE
3171 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3172 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3173 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3174 return
3175 build_unary_op
3176 (INDIRECT_REF, NULL_TREE,
3177 convert_to_fat_pointer (TREE_TYPE (type),
3178 build_unary_op (ADDR_EXPR,
3179 NULL_TREE, expr)));
3180
3181 /* Do something very similar for converting one unconstrained
3182 array to another. */
3183 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3184 return
3185 build_unary_op (INDIRECT_REF, NULL_TREE,
3186 convert (TREE_TYPE (type),
3187 build_unary_op (ADDR_EXPR,
3188 NULL_TREE, expr)));
3189 else
3190 gigi_abort (409);
3191
3192 case COMPLEX_TYPE:
3193 return fold (convert_to_complex (type, expr));
3194
3195 default:
3196 gigi_abort (410);
3197 }
3198 }
3199 \f
3200 /* Remove all conversions that are done in EXP. This includes converting
3201 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3202 is nonzero, always return the address of the containing object even if
3203 the address is not bit-aligned. */
3204
3205 tree
3206 remove_conversions (tree exp, int true_address)
3207 {
3208 switch (TREE_CODE (exp))
3209 {
3210 case CONSTRUCTOR:
3211 if (true_address
3212 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3213 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3214 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3215 break;
3216
3217 case COMPONENT_REF:
3218 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3219 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3220 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3221 break;
3222
3223 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3224 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3225 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3226
3227 default:
3228 break;
3229 }
3230
3231 return exp;
3232 }
3233 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the pointer to the array found in the fat
	     pointer's P_ARRAY field.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, 0));
	  /* Carry the constancy of the original reference over to the
	     new one.  */
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR with the type of the underlying array,
	   obtained through the fat pointer's array-pointer field.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): there is no break here, so any other expression of
	 unconstrained array type falls through into the RECORD_TYPE
	 handling below -- presumably intentional (it ends in the common
	 "return exp" path), but confirm.  */
    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    /* The array is the second field of the template record.  */
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	/* Likewise for an unpadded record containing a template.  */
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}
3289 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is set, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, int notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* Strip a biased representation from the input by viewing it as
	 an unbiased copy of its type first.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      /* Likewise, convert to an unbiased copy of a biased target type;
	 the result is reinterpreted as the biased type below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      /* Build a one-field record whose single field has exactly the
	 target type, so the bit pattern lands in the right place.  */
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      /* Wrap the input in the record, then unchecked convert the whole
	 record to the target type.  */
      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      expr = maybe_unconstrained_array (expr);
      /* Refetch ETYPE: maybe_unconstrained_array may have rewritten EXPR
	 with a different type.  */
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (! notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Extend by shifting the value up to the top of the machine word
	 and back down: the right shift is arithmetic or logical depending
	 on the signedness of BASE_TYPE, which follows TYPE's.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3447
3448 #include "gt-ada-utils.h"
3449 #include "gtype-ada.h"