system.h (ASM_OUTPUT_SECTION_NAME): Poison.
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2003, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40
41 #include "ada.h"
42 #include "types.h"
43 #include "atree.h"
44 #include "elists.h"
45 #include "namet.h"
46 #include "nlists.h"
47 #include "stringt.h"
48 #include "uintp.h"
49 #include "fe.h"
50 #include "sinfo.h"
51 #include "einfo.h"
52 #include "ada-tree.h"
53 #include "gigi.h"
54
/* Fall back to the width of DImode if the target does not define the
   largest fixed (non-BLKmode) mode size.  */
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

/* Fall back to the target word width if no wider maximum is defined.  */
#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif
62
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  Indexed by GNAT node id minus
   First_Node_Id; allocated lazily in init_gnat_to_gnu.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  Each entry saves one pending_elaborations
   list while a nested one is in effect.  */

struct e_stack GTY(()) {
  struct e_stack *next;
  tree elab_list;
};
static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
102
/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

	the global one
	one for each subprogram definition
	one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level GTY(())
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations.  These ..._DECL nodes are chained
     through the TREE_CHAIN field.  Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;
  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;
  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;
  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};

/* The binding level currently in effect.  */
static GTY(()) struct binding_level *current_binding_level;

/* A chain of binding_level structures awaiting reuse.  */
static GTY((deletable (""))) struct binding_level *free_binding_level;

/* The outermost binding level.  This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};

/* Per-function language-specific data; currently unused by Gigi.  */
struct language_function GTY(())
{
  int unused;
};
147
/* Forward declarations of subprograms local to this file.  */
static tree merge_sizes (tree, tree, tree, int, int);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static int value_zerop (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *,tree, tree, tree);
static int value_factor_p (tree, int);
static int potential_alignment_gap (tree, tree, tree);
158 \f
159 /* Initialize the association of GNAT nodes to GCC trees. */
160
161 void
162 init_gnat_to_gnu (void)
163 {
164 associate_gnat_to_gnu
165 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
166
167 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
168 }
169
170 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
171 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
172 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
173
174 If GNU_DECL is zero, a previous association is to be reset. */
175
176 void
177 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
178 {
179 /* Check that GNAT_ENTITY is not already defined and that it is being set
180 to something which is a decl. Raise gigi 401 if not. Usually, this
181 means GNAT_ENTITY is defined twice, but occasionally is due to some
182 Gigi problem. */
183 if (gnu_decl
184 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
185 || (! no_check && ! DECL_P (gnu_decl))))
186 gigi_abort (401);
187
188 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
189 }
190
191 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
192 Return the ..._DECL node that was associated with it. If there is no tree
193 node associated with GNAT_ENTITY, abort.
194
195 In some cases, such as delayed elaboration or expressions that need to
196 be elaborated only once, GNAT_ENTITY is really not an entity. */
197
198 tree
199 get_gnu_tree (Entity_Id gnat_entity)
200 {
201 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
202 gigi_abort (402);
203
204 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
205 }
206
207 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
208
209 int
210 present_gnu_tree (Entity_Id gnat_entity)
211 {
212 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
213 }
214
215 \f
216 /* Return non-zero if we are currently in the global binding level. */
217
218 int
219 global_bindings_p (void)
220 {
221 return (force_global != 0 || current_binding_level == global_binding_level
222 ? -1 : 0);
223 }
224
225 /* Return the list of declarations in the current level. Note that this list
226 is in reverse order (it has to be so for back-end compatibility). */
227
228 tree
229 getdecls (void)
230 {
231 return current_binding_level->names;
232 }
233
234 /* Nonzero if the current level needs to have a BLOCK made. */
235
236 int
237 kept_level_p (void)
238 {
239 return (current_binding_level->names != 0);
240 }
241
242 /* Enter a new binding level. The input parameter is ignored, but has to be
243 specified for back-end compatibility. */
244
245 void
246 pushlevel (int ignore ATTRIBUTE_UNUSED)
247 {
248 struct binding_level *newlevel = NULL;
249
250 /* Reuse a struct for this binding level, if there is one. */
251 if (free_binding_level)
252 {
253 newlevel = free_binding_level;
254 free_binding_level = free_binding_level->level_chain;
255 }
256 else
257 newlevel
258 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
259
260 *newlevel = clear_binding_level;
261
262 /* Add this level to the front of the chain (stack) of levels that are
263 active. */
264 newlevel->level_chain = current_binding_level;
265 current_binding_level = newlevel;
266 }
267
/* Exit a binding level.
   Pop the level off, and restore the state of the identifier-decl mappings
   that were in effect when this level was entered.

   If KEEP is nonzero, this level had explicit declarations, so create a
   "block" (a BLOCK node) for the level to record its declarations and
   subblocks for symbol table output.

   If FUNCTIONBODY is nonzero, this level is the body of a function,
   so create a block as if KEEP were set and also clear out all
   label names.

   If REVERSE is nonzero, reverse the order of decls before putting
   them into the BLOCK.

   Returns the BLOCK for the level, or NULL_TREE if none was made.  */

tree
poplevel (int keep, int reverse, int functionbody)
{
  /* Points to a GCC BLOCK tree node.  This is the BLOCK node constructed
     for the binding level that we are about to exit and which is returned
     by this routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL node, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
	&& ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
	&& DECL_INITIAL (decl_node) != 0)
      {
	push_function_context ();
	output_inline_function (decl_node);
	pop_function_context ();
      }

  /* Use the preallocated BLOCK if there is one; otherwise make one only
     if this level declared something or is a function body.  */
  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock its enclosing scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
	 don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
	{
	  if (TREE_USED (subblock_node))
	    TREE_USED (DECL_NAME (subblock_node)) = 1;
	  if (TREE_ADDRESSABLE (subblock_node))
	    TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
	}

  {
    /* Pop the current level, and free the structure for reuse.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function.  The ..._DECL chain stored
	 in BLOCK_VARS are the function's parameters (PARM_DECL nodes).  Don't
	 leave them in the BLOCK because they are found in the FUNCTION_DECL
	 instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      /* Record this block as a subblock of the (now current) enclosing
	 level, unless it had been preallocated and recorded already.  */
      if (!block_previously_created)
	current_binding_level->blocks
	  = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
384 \f
385 /* Insert BLOCK at the end of the list of subblocks of the
386 current binding level. This is used when a BIND_EXPR is expanded,
387 to handle the BLOCK node inside the BIND_EXPR. */
388
389 void
390 insert_block (tree block)
391 {
392 TREE_USED (block) = 1;
393 current_binding_level->blocks
394 = chainon (current_binding_level->blocks, block);
395 }
396
397 /* Set the BLOCK node for the innermost scope
398 (the one we are currently in). */
399
400 void
401 set_block (tree block)
402 {
403 current_binding_level->this_block = block;
404 current_binding_level->names = chainon (current_binding_level->names,
405 BLOCK_VARS (block));
406 current_binding_level->blocks = chainon (current_binding_level->blocks,
407 BLOCK_SUBBLOCKS (block));
408 }
409
/* Records a ..._DECL node DECL as belonging to the current lexical scope.
   Returns the ..._DECL node.  */

tree
pushdecl (tree decl)
{
  struct binding_level *b;

  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of its function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    {
      b = global_binding_level;
      DECL_CONTEXT (decl) = 0;
    }
  else
    {
      b = current_binding_level;
      DECL_CONTEXT (decl) = current_function_decl;
    }

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later if necessary.  This needs to be
     this way for compatibility with the back-end.

     Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list.  They
     will cause trouble with the debugger and aren't needed anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TREE_CHAIN (decl) = b->names;
      b->names = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
	  || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
	  || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
	      && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
	      && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  return decl;
}
464 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  /* NOTE(review): char_type_node is pushed under the external name
     "unsigned char" -- presumably so debug output matches the Ada view of
     Character; confirm against exp_dbug.ads before changing.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}
502
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.

   LONG_LONG_FLOAT_TYPE is the front end's longest float type and
   EXCEPTION_TYPE is its exception type; both are used to set the
   corresponding Gigi globals.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
			    longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
			unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
			    void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  The
     jmpbuf is a six-element array (indexes 0..5) of Pmode-sized words.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
     NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
     NULL_TREE,
     build_function_type (void_type_node,
			  tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
     NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_gnat_exception"),
     NULL_TREE,
     build_function_type (build_pointer_type (except_type_node), NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	/* 14 chars of prefix + 2 digits + NUL; the %.2d format assumes
	   reason codes never exceed two digits.  */
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");
}
689 \f
690 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
691 nodes (FIELDLIST), finish constructing the record or union type.
692 If HAS_REP is nonzero, this record has a rep clause; don't call
693 layout_type but merely set the size and alignment ourselves.
694 If DEFER_DEBUG is nonzero, do not call the debugging routines
695 on this type; it will be done later. */
696
697 void
698 finish_record_type (tree record_type,
699 tree fieldlist,
700 int has_rep,
701 int defer_debug)
702 {
703 enum tree_code code = TREE_CODE (record_type);
704 tree ada_size = bitsize_zero_node;
705 tree size = bitsize_zero_node;
706 tree size_unit = size_zero_node;
707 int var_size = 0;
708 tree field;
709
710 TYPE_FIELDS (record_type) = fieldlist;
711
712 if (TYPE_NAME (record_type) != 0
713 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
714 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
715 else
716 TYPE_STUB_DECL (record_type)
717 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
718 record_type));
719
720 /* We don't need both the typedef name and the record name output in
721 the debugging information, since they are the same. */
722 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
723
724 /* Globally initialize the record first. If this is a rep'ed record,
725 that just means some initializations; otherwise, layout the record. */
726
727 if (has_rep)
728 {
729 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
730 TYPE_MODE (record_type) = BLKmode;
731 if (TYPE_SIZE (record_type) == 0)
732 {
733 TYPE_SIZE (record_type) = bitsize_zero_node;
734 TYPE_SIZE_UNIT (record_type) = size_zero_node;
735 }
736 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
737 out just like a UNION_TYPE, since the size will be fixed. */
738 else if (code == QUAL_UNION_TYPE)
739 code = UNION_TYPE;
740 }
741 else
742 {
743 /* Ensure there isn't a size already set. There can be in an error
744 case where there is a rep clause but all fields have errors and
745 no longer have a position. */
746 TYPE_SIZE (record_type) = 0;
747 layout_type (record_type);
748 }
749
750 /* At this point, the position and size of each field is known. It was
751 either set before entry by a rep clause, or by laying out the type
752 above. We now make a pass through the fields (in reverse order for
753 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
754 (for rep'ed records that are not padding types); and the mode (for
755 rep'ed records). */
756
757 if (code == QUAL_UNION_TYPE)
758 fieldlist = nreverse (fieldlist);
759
760 for (field = fieldlist; field; field = TREE_CHAIN (field))
761 {
762 tree type = TREE_TYPE (field);
763 tree this_size = DECL_SIZE (field);
764 tree this_size_unit = DECL_SIZE_UNIT (field);
765 tree this_ada_size = DECL_SIZE (field);
766
767 /* We need to make an XVE/XVU record if any field has variable size,
768 whether or not the record does. For example, if we have an union,
769 it may be that all fields, rounded up to the alignment, have the
770 same size, in which case we'll use that size. But the debug
771 output routines (except Dwarf2) won't be able to output the fields,
772 so we need to make the special record. */
773 if (TREE_CODE (this_size) != INTEGER_CST)
774 var_size = 1;
775
776 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
777 || TREE_CODE (type) == QUAL_UNION_TYPE)
778 && ! TYPE_IS_FAT_POINTER_P (type)
779 && ! TYPE_CONTAINS_TEMPLATE_P (type)
780 && TYPE_ADA_SIZE (type) != 0)
781 this_ada_size = TYPE_ADA_SIZE (type);
782
783 if (has_rep && ! DECL_BIT_FIELD (field))
784 TYPE_ALIGN (record_type)
785 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
786
787 switch (code)
788 {
789 case UNION_TYPE:
790 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
791 size = size_binop (MAX_EXPR, size, this_size);
792 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
793 break;
794
795 case QUAL_UNION_TYPE:
796 ada_size
797 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
798 this_ada_size, ada_size));
799 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
800 this_size, size));
801 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
802 this_size_unit, size_unit));
803 break;
804
805 case RECORD_TYPE:
806 /* Since we know here that all fields are sorted in order of
807 increasing bit position, the size of the record is one
808 higher than the ending bit of the last field processed
809 unless we have a rep clause, since in that case we might
810 have a field outside a QUAL_UNION_TYPE that has a higher ending
811 position. So use a MAX in that case. Also, if this field is a
812 QUAL_UNION_TYPE, we need to take into account the previous size in
813 the case of empty variants. */
814 ada_size
815 = merge_sizes (ada_size, bit_position (field), this_ada_size,
816 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
817 size = merge_sizes (size, bit_position (field), this_size,
818 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
819 size_unit
820 = merge_sizes (size_unit, byte_position (field), this_size_unit,
821 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
822 break;
823
824 default:
825 abort ();
826 }
827 }
828
829 if (code == QUAL_UNION_TYPE)
830 nreverse (fieldlist);
831
832 /* If this is a padding record, we never want to make the size smaller than
833 what was specified in it, if any. */
834 if (TREE_CODE (record_type) == RECORD_TYPE
835 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
836 {
837 size = TYPE_SIZE (record_type);
838 size_unit = TYPE_SIZE_UNIT (record_type);
839 }
840
841 /* Now set any of the values we've just computed that apply. */
842 if (! TYPE_IS_FAT_POINTER_P (record_type)
843 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
844 SET_TYPE_ADA_SIZE (record_type, ada_size);
845
846 if (has_rep)
847 {
848 if (! (TREE_CODE (record_type) == RECORD_TYPE
849 && TYPE_IS_PADDING_P (record_type)
850 && CONTAINS_PLACEHOLDER_P (size)))
851 {
852 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
853 TYPE_SIZE_UNIT (record_type)
854 = round_up (size_unit,
855 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
856 }
857
858 compute_record_mode (record_type);
859 }
860
861 if (! defer_debug)
862 {
863 /* If this record is of variable size, rename it so that the
864 debugger knows it is and make a new, parallel, record
865 that tells the debugger how the record is laid out. See
866 exp_dbug.ads. But don't do this for records that are padding
867 since they confuse GDB. */
868 if (var_size
869 && ! (TREE_CODE (record_type) == RECORD_TYPE
870 && TYPE_IS_PADDING_P (record_type)))
871 {
872 tree new_record_type
873 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
874 ? UNION_TYPE : TREE_CODE (record_type));
875 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
876 tree new_id
877 = concat_id_with_name (orig_id,
878 TREE_CODE (record_type) == QUAL_UNION_TYPE
879 ? "XVU" : "XVE");
880 tree last_pos = bitsize_zero_node;
881 tree old_field;
882 tree prev_old_field = 0;
883
884 TYPE_NAME (new_record_type) = new_id;
885 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
886 TYPE_STUB_DECL (new_record_type)
887 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
888 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
889 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
890 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
891 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
892
893 /* Now scan all the fields, replacing each field with a new
894 field corresponding to the new encoding. */
895 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
896 old_field = TREE_CHAIN (old_field))
897 {
898 tree field_type = TREE_TYPE (old_field);
899 tree field_name = DECL_NAME (old_field);
900 tree new_field;
901 tree curpos = bit_position (old_field);
902 int var = 0;
903 unsigned int align = 0;
904 tree pos;
905
906 /* See how the position was modified from the last position.
907
908 There are two basic cases we support: a value was added
909 to the last position or the last position was rounded to
	     a boundary and then something was added.  Check for the
911 first case first. If not, see if there is any evidence
912 of rounding. If so, round the last position and try
913 again.
914
915 If this is a union, the position can be taken as zero. */
916
917 if (TREE_CODE (new_record_type) == UNION_TYPE)
918 pos = bitsize_zero_node, align = 0;
919 else
920 pos = compute_related_constant (curpos, last_pos);
921
922 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
923 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
924 {
925 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
926 pos = compute_related_constant (curpos,
927 round_up (last_pos, align));
928 }
929 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
930 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
931 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
932 && host_integerp (TREE_OPERAND
933 (TREE_OPERAND (curpos, 0), 1),
934 1))
935 {
936 align
937 = tree_low_cst
938 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
939 pos = compute_related_constant (curpos,
940 round_up (last_pos, align));
941 }
942 else if (potential_alignment_gap (prev_old_field, old_field,
943 pos))
944 {
945 align = TYPE_ALIGN (field_type);
946 pos = compute_related_constant (curpos,
947 round_up (last_pos, align));
948 }
949
950 /* If we can't compute a position, set it to zero.
951
952 ??? We really should abort here, but it's too much work
953 to get this correct for all cases. */
954
955 if (pos == 0)
956 pos = bitsize_zero_node;
957
958 /* See if this type is variable-size and make a new type
959 and indicate the indirection if so. */
960 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
961 {
962 field_type = build_pointer_type (field_type);
963 var = 1;
964 }
965
966 /* Make a new field name, if necessary. */
967 if (var || align != 0)
968 {
969 char suffix[6];
970
971 if (align != 0)
972 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
973 align / BITS_PER_UNIT);
974 else
975 strcpy (suffix, "XVL");
976
977 field_name = concat_id_with_name (field_name, suffix);
978 }
979
980 new_field = create_field_decl (field_name, field_type,
981 new_record_type, 0,
982 DECL_SIZE (old_field), pos, 0);
983 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
984 TYPE_FIELDS (new_record_type) = new_field;
985
986 /* If old_field is a QUAL_UNION_TYPE, take its size as being
987 zero. The only time it's not the last field of the record
988 is when there are other components at fixed positions after
989 it (meaning there was a rep clause for every field) and we
990 want to be able to encode them. */
991 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
992 (TREE_CODE (TREE_TYPE (old_field))
993 == QUAL_UNION_TYPE)
994 ? bitsize_zero_node
995 : DECL_SIZE (old_field));
996 prev_old_field = old_field;
997 }
998
999 TYPE_FIELDS (new_record_type)
1000 = nreverse (TYPE_FIELDS (new_record_type));
1001
1002 rest_of_type_compilation (new_record_type, global_bindings_p ());
1003 }
1004
1005 rest_of_type_compilation (record_type, global_bindings_p ());
1006 }
1007 }
1008
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size,
             tree first_bit,
             tree size,
             int special,
             int has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  /* Ordinary case: the field ends at FIRST_BIT + SIZE.  When there is a
     rep clause (HAS_REP), the new size is the MAX of that end position
     and the size accumulated so far.  */
  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  /* QUAL_UNION_TYPE case: SIZE is a COND_EXPR.  Merge each arm
     recursively, substituting the old size for an arm of zero.  */
  else
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1054
1055 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1056 related by the addition of a constant. Return that constant if so. */
1057
1058 static tree
1059 compute_related_constant (tree op0, tree op1)
1060 {
1061 tree op0_var, op1_var;
1062 tree op0_con = split_plus (op0, &op0_var);
1063 tree op1_con = split_plus (op1, &op1_var);
1064 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1065
1066 if (operand_equal_p (op0_var, op1_var, 0))
1067 return result;
1068 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1069 return result;
1070 else
1071 return 0;
1072 }
1073
1074 /* Utility function of above to split a tree OP which may be a sum, into a
1075 constant part, which is returned, and a variable part, which is stored
1076 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1077 bitsizetype. */
1078
1079 static tree
1080 split_plus (tree in, tree *pvar)
1081 {
1082 /* Strip NOPS in order to ease the tree traversal and maximize the
1083 potential for constant or plus/minus discovery. We need to be careful
1084 to always return and set *pvar to bitsizetype trees, but it's worth
1085 the effort. */
1086 STRIP_NOPS (in);
1087
1088 *pvar = convert (bitsizetype, in);
1089
1090 if (TREE_CODE (in) == INTEGER_CST)
1091 {
1092 *pvar = bitsize_zero_node;
1093 return convert (bitsizetype, in);
1094 }
1095 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1096 {
1097 tree lhs_var, rhs_var;
1098 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1099 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1100
1101 if (lhs_var == TREE_OPERAND (in, 0)
1102 && rhs_var == TREE_OPERAND (in, 1))
1103 return bitsize_zero_node;
1104
1105 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1106 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1107 }
1108 else
1109 return bitsize_zero_node;
1110 }
1111 \f
1112 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1113 subprogram. If it is void_type_node, then we are dealing with a procedure,
1114 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1115 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1116 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1117 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1118 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1119 RETURNS_WITH_DSP is nonzero if the function is to return with a
1120 depressed stack pointer. */
1121
1122 tree
1123 create_subprog_type (tree return_type,
1124 tree param_decl_list,
1125 tree cico_list,
1126 int returns_unconstrained,
1127 int returns_by_ref,
1128 int returns_with_dsp)
1129 {
1130 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1131 the subprogram formal parameters. This list is generated by traversing the
1132 input list of PARM_DECL nodes. */
1133 tree param_type_list = NULL;
1134 tree param_decl;
1135 tree type;
1136
1137 for (param_decl = param_decl_list; param_decl;
1138 param_decl = TREE_CHAIN (param_decl))
1139 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1140 param_type_list);
1141
1142 /* The list of the function parameter types has to be terminated by the void
1143 type to signal to the back-end that we are not dealing with a variable
1144 parameter subprogram, but that the subprogram has a fixed number of
1145 parameters. */
1146 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1147
1148 /* The list of argument types has been created in reverse
1149 so nreverse it. */
1150 param_type_list = nreverse (param_type_list);
1151
1152 type = build_function_type (return_type, param_type_list);
1153
1154 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1155 or the new type should, make a copy of TYPE. Likewise for
1156 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1157 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1158 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1159 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1160 type = copy_type (type);
1161
1162 SET_TYPE_CI_CO_LIST (type, cico_list);
1163 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1164 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1165 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1166 return type;
1167 }
1168 \f
1169 /* Return a copy of TYPE but safe to modify in any way. */
1170
1171 tree
1172 copy_type (tree type)
1173 {
1174 tree new = copy_node (type);
1175
1176 /* copy_node clears this field instead of copying it, because it is
1177 aliased with TREE_CHAIN. */
1178 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1179
1180 TYPE_POINTER_TO (new) = 0;
1181 TYPE_REFERENCE_TO (new) = 0;
1182 TYPE_MAIN_VARIANT (new) = new;
1183 TYPE_NEXT_VARIANT (new) = 0;
1184
1185 return new;
1186 }
1187 \f
1188 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1189 TYPE_INDEX_TYPE is INDEX. */
1190
1191 tree
1192 create_index_type (tree min, tree max, tree index)
1193 {
1194 /* First build a type for the desired range. */
1195 tree type = build_index_2_type (min, max);
1196
1197 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1198 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1199 is set, but not to INDEX, make a copy of this type with the requested
1200 index type. Note that we have no way of sharing these types, but that's
1201 only a small hole. */
1202 if (TYPE_INDEX_TYPE (type) == index)
1203 return type;
1204 else if (TYPE_INDEX_TYPE (type) != 0)
1205 type = copy_type (type);
1206
1207 SET_TYPE_INDEX_TYPE (type, index);
1208 return type;
1209 }
1210 \f
1211 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1212 string) and TYPE is a ..._TYPE node giving its data type.
1213 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1214 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1215 information about this type. */
1216
1217 tree
1218 create_type_decl (tree type_name,
1219 tree type,
1220 struct attrib *attr_list,
1221 int artificial_p,
1222 int debug_info_p)
1223 {
1224 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1225 enum tree_code code = TREE_CODE (type);
1226
1227 DECL_ARTIFICIAL (type_decl) = artificial_p;
1228 pushdecl (type_decl);
1229 process_attributes (type_decl, attr_list);
1230
1231 /* Pass type declaration information to the debugger unless this is an
1232 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1233 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1234 a dummy type, which will be completed later, or a type for which
1235 debugging information was not requested. */
1236 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1237 || ! debug_info_p)
1238 DECL_IGNORED_P (type_decl) = 1;
1239 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1240 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1241 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1242 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1243
1244 return type_decl;
1245 }
1246
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.  */

tree
create_var_decl (tree var_name,
                 tree asm_name,
                 tree type,
                 tree var_init,
                 int const_flag,
                 int public_flag,
                 int extern_flag,
                 int static_flag,
                 struct attrib *attr_list)
{
  /* Nonzero if VAR_INIT is a constant acceptable for the storage this
     variable gets: a valid static initializer at global scope or for
     static storage, otherwise any TREE_CONSTANT expression.  The types
     must also agree (up to main variant).  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small"  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* Initialization to be emitted as an assignment statement after the
     declaration, instead of through DECL_INITIAL; see below.  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* A non-constant initializer for a global becomes elaboration code.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  /* A type mismatch, or a non-constant initializer for a static, is
     handled by an explicit assignment after the declaration.  */
  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top
     level we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  /* Emit the deferred assignment-style initialization, if any.  */
  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1368 \f
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name,
                   tree field_type,
                   tree record_type,
                   int packed,
                   tree size,
                   tree pos,
                   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed)
    {
      /* For PACKED == 1, use the RM size when it differs from the
	 natural type size.  */
      if (packed == 1 && ! operand_equal_p (rm_size (field_type),
					    TYPE_SIZE (field_type), 0))
	size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (size != 0 && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* With no packing and no explicit position, the bitfield can still
	 be aligned as far as the record and the field type permit.  */
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  /* Bump the alignment as appropriate: bitfields can be placed at any
     bit, packed non-BLKmode fields at any byte, others at the natural
     alignment of their type.  */
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  /* A nonzero POS places the field at that exact bit position.  */
  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);

  return field_decl;
}
1489
1490 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1491 effects, has the value of zero. */
1492
1493 static int
1494 value_zerop (tree exp)
1495 {
1496 if (TREE_CODE (exp) == COMPOUND_EXPR)
1497 return value_zerop (TREE_OPERAND (exp, 1));
1498
1499 return integer_zerop (exp);
1500 }
1501 \f
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

tree
create_param_decl (tree param_name, tree param_type, int readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor the PROMOTE_PROTOTYPES target macro, as not doing so can
     lead to various ABI violations.  */
#ifdef PROMOTE_PROTOTYPES
  if ((TREE_CODE (param_type) == INTEGER_TYPE
       || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  /* copy_type unshares the hashed range type so the biasing flag
	     set below does not leak onto a shared node.  */
	  param_type
	    = copy_type (build_range_type (integer_type_node,
					   TYPE_MIN_VALUE (param_type),
					   TYPE_MAX_VALUE (param_type)));

	  TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
	}
      else
	param_type = integer_type_node;
    }
#endif

  /* Record the (possibly promoted) PARAM_TYPE on the decl.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
1541 \f
1542 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1543
1544 void
1545 process_attributes (tree decl, struct attrib *attr_list)
1546 {
1547 for (; attr_list; attr_list = attr_list->next)
1548 switch (attr_list->type)
1549 {
1550 case ATTR_MACHINE_ATTRIBUTE:
1551 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1552 NULL_TREE),
1553 ATTR_FLAG_TYPE_IN_PLACE);
1554 break;
1555
1556 case ATTR_LINK_ALIAS:
1557 TREE_STATIC (decl) = 1;
1558 assemble_alias (decl, attr_list->name);
1559 break;
1560
1561 case ATTR_WEAK_EXTERNAL:
1562 if (SUPPORTS_WEAK)
1563 declare_weak (decl);
1564 else
1565 post_error ("?weak declarations not supported on this target",
1566 attr_list->error_point);
1567 break;
1568
1569 case ATTR_LINK_SECTION:
1570 if (targetm.have_named_sections)
1571 {
1572 DECL_SECTION_NAME (decl)
1573 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1574 IDENTIFIER_POINTER (attr_list->name));
1575 DECL_COMMON (decl) = 0;
1576 }
1577 else
1578 post_error ("?section attributes are not supported for this target",
1579 attr_list->error_point);
1580 break;
1581 }
1582 }
1583 \f
1584 /* Add some pending elaborations on the list. */
1585
1586 void
1587 add_pending_elaborations (tree var_decl, tree var_init)
1588 {
1589 if (var_init != 0)
1590 Check_Elaboration_Code_Allowed (error_gnat_node);
1591
1592 pending_elaborations
1593 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1594 }
1595
1596 /* Obtain any pending elaborations and clear the old list. */
1597
1598 tree
1599 get_pending_elaborations (void)
1600 {
1601 /* Each thing added to the list went on the end; we want it on the
1602 beginning. */
1603 tree result = TREE_CHAIN (pending_elaborations);
1604
1605 TREE_CHAIN (pending_elaborations) = 0;
1606 return result;
1607 }
1608
1609 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1610 of 2. */
1611
1612 static int
1613 value_factor_p (tree value, int factor)
1614 {
1615 if (host_integerp (value, 1))
1616 return tree_low_cst (value, 1) % factor == 0;
1617
1618 if (TREE_CODE (value) == MULT_EXPR)
1619 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1620 || value_factor_p (TREE_OPERAND (value, 1), factor));
1621
1622 return 0;
1623 }
1624
1625 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1626 unless we can prove these 2 fields are laid out in such a way that no gap
1627 exist between the end of PREV_FIELD and the begining of CURR_FIELD. OFFSET
1628 is the distance in bits between the end of PREV_FIELD and the starting
1629 position of CURR_FIELD. It is ignored if null. */
1630
1631 static int
1632 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1633 {
1634 /* If this is the first field of the record, there cannot be any gap */
1635 if (!prev_field)
1636 return 0;
1637
1638 /* If the previous field is a union type, then return False: The only
1639 time when such a field is not the last field of the record is when
1640 there are other components at fixed positions after it (meaning there
1641 was a rep clause for every field), in which case we don't want the
1642 alignment constraint to override them. */
1643 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1644 return 0;
1645
1646 /* If the distance between the end of prev_field and the begining of
1647 curr_field is constant, then there is a gap if the value of this
1648 constant is not null. */
1649 if (offset && host_integerp (offset, 1))
1650 return (!integer_zerop (offset));
1651
1652 /* If the size and position of the previous field are constant,
1653 then check the sum of this size and position. There will be a gap
1654 iff it is not multiple of the current field alignment. */
1655 if (host_integerp (DECL_SIZE (prev_field), 1)
1656 && host_integerp (bit_position (prev_field), 1))
1657 return ((tree_low_cst (bit_position (prev_field), 1)
1658 + tree_low_cst (DECL_SIZE (prev_field), 1))
1659 % DECL_ALIGN (curr_field) != 0);
1660
1661 /* If both the position and size of the previous field are multiples
1662 of the current field alignment, there can not be any gap. */
1663 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1664 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1665 return 0;
1666
1667 /* Fallback, return that there may be a potential gap */
1668 return 1;
1669 }
1670
1671 /* Return nonzero if there are pending elaborations. */
1672
1673 int
1674 pending_elaborations_p (void)
1675 {
1676 return TREE_CHAIN (pending_elaborations) != 0;
1677 }
1678
1679 /* Save a copy of the current pending elaboration list and make a new
1680 one. */
1681
1682 void
1683 push_pending_elaborations (void)
1684 {
1685 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1686
1687 p->next = elist_stack;
1688 p->elab_list = pending_elaborations;
1689 elist_stack = p;
1690 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1691 }
1692
1693 /* Pop the stack of pending elaborations. */
1694
1695 void
1696 pop_pending_elaborations (void)
1697 {
1698 struct e_stack *p = elist_stack;
1699
1700 pending_elaborations = p->elab_list;
1701 elist_stack = p->next;
1702 }
1703
1704 /* Return the current position in pending_elaborations so we can insert
1705 elaborations after that point. */
1706
1707 tree
1708 get_elaboration_location (void)
1709 {
1710 return tree_last (pending_elaborations);
1711 }
1712
/* Insert the current elaborations after ELAB, which is in some elaboration
   list.  */

void
insert_elaboration_list (tree elab)
{
  /* Remember ELAB's old successor so the target list can be re-linked
     after the splice.  */
  tree next = TREE_CHAIN (elab);

  if (TREE_CHAIN (pending_elaborations))
    {
      /* Splice the pending entries (which hang off the dummy head node)
	 in between ELAB and its old successor, then leave the pending
	 list empty.  The order of these three statements matters:
	 tree_last must walk the pending entries before the head's chain
	 is cleared.  */
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
}
1728
1729 /* Returns a LABEL_DECL node for LABEL_NAME. */
1730
1731 tree
1732 create_label_decl (tree label_name)
1733 {
1734 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1735
1736 DECL_CONTEXT (label_decl) = current_function_decl;
1737 DECL_MODE (label_decl) = VOIDmode;
1738 DECL_SOURCE_LOCATION (label_decl) = input_location;
1739
1740 return label_decl;
1741 }
1742 \f
1743 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1744 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1745 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1746 PARM_DECL nodes chained through the TREE_CHAIN field).
1747
1748 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1749 appropriate fields in the FUNCTION_DECL. */
1750
1751 tree
1752 create_subprog_decl (tree subprog_name,
1753 tree asm_name,
1754 tree subprog_type,
1755 tree param_decl_list,
1756 int inline_flag,
1757 int public_flag,
1758 int extern_flag,
1759 struct attrib *attr_list)
1760 {
1761 tree return_type = TREE_TYPE (subprog_type);
1762 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1763
1764 /* If this is a function nested inside an inlined external function, it
1765 means we aren't going to compile the outer function unless it is
1766 actually inlined, so do the same for us. */
1767 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1768 && DECL_EXTERNAL (current_function_decl))
1769 extern_flag = 1;
1770
1771 DECL_EXTERNAL (subprog_decl) = extern_flag;
1772 TREE_PUBLIC (subprog_decl) = public_flag;
1773 DECL_INLINE (subprog_decl) = inline_flag;
1774 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1775 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1776 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1777 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1778 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1779
1780 if (asm_name != 0)
1781 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1782
1783 process_attributes (subprog_decl, attr_list);
1784
1785 /* Add this decl to the current binding level. */
1786 subprog_decl = pushdecl (subprog_decl);
1787
1788 /* Output the assembler code and/or RTL for the declaration. */
1789 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1790
1791 return subprog_decl;
1792 }
1793 \f
/* Count how deep we are into nested functions.  We only call the
   back-end function-context push/pop routines when this is nonzero,
   i.e. when we really are inside a nested function.  */

static int function_nesting_depth;
1799
1800 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1801 body. This routine needs to be invoked before processing the declarations
1802 appearing in the subprogram. */
1803
void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl_list;
  tree param_decl;
  tree next_param;

  /* Only save the enclosing function's backend context when we are
     entering a nested function; at top level there is nothing to save.  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative. error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl)  = error_mark_node;

  /* This function exists in static storage. This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
     subprogram body) so that they can be recognized as local variables in the
     subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecl'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  /* Push each parameter individually; the chain links are cleared first
     because pushdecl rebuilds the chain in the binding level.  */
  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes. They appear in the right order. */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
1862
1863 /* Finish the definition of the current subprogram and compile it all the way
1864 to assembler language output. */
1865
void
end_subprog_body (void)
{
  tree decl;
  tree cico_list;

  /* Pop the binding level opened by begin_subprog_body; per the comment
     there, this replaces the error_mark_node in DECL_INITIAL with the
     BLOCK for the body.  */
  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end ();

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  rest_of_compilation (current_function_decl);

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway.  */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  /* DECL_SAVED_INSNS is zero when the function was not saved for
     inlining; in that case the RTL can be discarded.  */
  if (DECL_SAVED_INSNS (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory.  */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value.  */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it.  */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1930 \f
1931 /* Return a definition for a builtin function named NAME and whose data type
1932 is TYPE. TYPE should be a function type with argument types.
1933 FUNCTION_CODE tells later passes how to compile calls to this function.
1934 See tree.h for its possible values.
1935
1936 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1937 the name to be called if we can't opencode the function. If
1938 ATTRS is nonzero, use that for the function attribute list. */
1939
tree
builtin_function (const char *name,
                  tree type,
                  int function_code,
                  enum built_in_class class,
                  const char *library_name,
                  tree attrs)
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  /* Builtins are declared, not defined here, and are globally visible.  */
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  /* LIBRARY_NAME, when given, is the fallback symbol to call when the
     builtin cannot be open-coded.  */
  if (library_name)
    SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));

  pushdecl (decl);
  /* Record the builtin class and code so later passes know how to
     compile calls to this function (see tree.h).  */
  DECL_BUILT_IN_CLASS (decl) = class;
  DECL_FUNCTION_CODE (decl) = function_code;
  if (attrs)
      decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
  return decl;
}
1962
1963 /* Return an integer type with the number of bits of precision given by
1964 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1965 it is a signed type. */
1966
1967 tree
1968 gnat_type_for_size (unsigned precision, int unsignedp)
1969 {
1970 tree t;
1971 char type_name[20];
1972
1973 if (precision <= 2 * MAX_BITS_PER_WORD
1974 && signed_and_unsigned_types[precision][unsignedp] != 0)
1975 return signed_and_unsigned_types[precision][unsignedp];
1976
1977 if (unsignedp)
1978 t = make_unsigned_type (precision);
1979 else
1980 t = make_signed_type (precision);
1981
1982 if (precision <= 2 * MAX_BITS_PER_WORD)
1983 signed_and_unsigned_types[precision][unsignedp] = t;
1984
1985 if (TYPE_NAME (t) == 0)
1986 {
1987 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1988 TYPE_NAME (t) = get_identifier (type_name);
1989 }
1990
1991 return t;
1992 }
1993
1994 /* Likewise for floating-point types. */
1995
1996 static tree
1997 float_type_for_precision (int precision, enum machine_mode mode)
1998 {
1999 tree t;
2000 char type_name[20];
2001
2002 if (float_types[(int) mode] != 0)
2003 return float_types[(int) mode];
2004
2005 float_types[(int) mode] = t = make_node (REAL_TYPE);
2006 TYPE_PRECISION (t) = precision;
2007 layout_type (t);
2008
2009 if (TYPE_MODE (t) != mode)
2010 gigi_abort (414);
2011
2012 if (TYPE_NAME (t) == 0)
2013 {
2014 sprintf (type_name, "FLOAT_%d", precision);
2015 TYPE_NAME (t) = get_identifier (type_name);
2016 }
2017
2018 return t;
2019 }
2020
2021 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2022 an unsigned type; otherwise a signed type is returned. */
2023
2024 tree
2025 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2026 {
2027 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2028 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2029 else
2030 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2031 }
2032
2033 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2034
2035 tree
2036 gnat_unsigned_type (tree type_node)
2037 {
2038 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2039
2040 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2041 {
2042 type = copy_node (type);
2043 TREE_TYPE (type) = type_node;
2044 }
2045 else if (TREE_TYPE (type_node) != 0
2046 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2047 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2048 {
2049 type = copy_node (type);
2050 TREE_TYPE (type) = TREE_TYPE (type_node);
2051 }
2052
2053 return type;
2054 }
2055
2056 /* Return the signed version of a TYPE_NODE, a scalar type. */
2057
2058 tree
2059 gnat_signed_type (tree type_node)
2060 {
2061 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2062
2063 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2064 {
2065 type = copy_node (type);
2066 TREE_TYPE (type) = type_node;
2067 }
2068 else if (TREE_TYPE (type_node) != 0
2069 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2070 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2071 {
2072 type = copy_node (type);
2073 TREE_TYPE (type) = TREE_TYPE (type_node);
2074 }
2075
2076 return type;
2077 }
2078
2079 /* Return a type the same as TYPE except unsigned or signed according to
2080 UNSIGNEDP. */
2081
2082 tree
2083 gnat_signed_or_unsigned_type (int unsignedp, tree type)
2084 {
2085 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2086 return type;
2087 else
2088 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2089 }
2090 \f
2091 /* EXP is an expression for the size of an object. If this size contains
2092 discriminant references, replace them with the maximum (if MAX_P) or
2093 minimum (if ! MAX_P) possible value of the discriminant. */
2094
tree
max_size (tree exp, int max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants contain no discriminant references,
	 so return them unchanged.  */
      return exp;

    case 'x':
      /* Process each element of a TREE_LIST recursively.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (! CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Replace the discriminant reference with the extreme value of
	 its type (operand 1 is the referenced field).  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison's value is 0 or 1, so its extremes are known
	 without examining the operands.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  /* NEGATE_EXPR flips the sense of max/min for its operand.  */
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);
	  else if (code == WITH_RECORD_EXPR)
	    return exp;

	  {
	    /* For MINUS_EXPR, the RHS's contribution has the opposite
	       sense, so flip max_p for it.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Either arm may be chosen at run time, so the bound is the
	       larger of the two arms' bounds.  */
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }

  /* Any tree code not handled above is unexpected here.  */
  gigi_abort (408);
}
2189 \f
2190 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2191 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2192 Return a constructor for the template. */
2193
tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Look through padding and left-justified-modular wrappers to get at
     the actual array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.   Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     The template fields come in (min, max) pairs, so FIELD advances two
     at a time; in parallel we advance either BOUND_LIST or descend one
     array level per dimension.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      /* FIELD is the min field and TREE_CHAIN (FIELD) the max field of
	 this pair; convert each bound to its field's type.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 surround them with a WITH_RECORD_EXPR giving EXPR as the
	 OBJECT.  */
      if (CONTAINS_PLACEHOLDER_P (min))
	min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
      if (CONTAINS_PLACEHOLDER_P (max))
	max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);

      /* Elements are consed in reverse; nreverse below restores order.  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2255 \f
2256 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2257 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2258 in the type contains in its DECL_INITIAL the expression to use when
2259 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2260 to print out an error message if the mechanism cannot be applied to
2261 an object of that type and also for the name. */
2262
tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Collect the per-dimension index (domain) types.  For
     Fortran-convention arrays (except with the NCA mechanism) the
     dimensions are stored in reverse order.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  The numeric codes below are the VMS
     descriptor data-type codes — NOTE(review): verify against the
     OpenVMS calling standard when changing them.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TREE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TREE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TREE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TREE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TREE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  /* The remaining fields depend on the descriptor mechanism.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  Each is the running
	 product of the extents of the previous dimensions (only
	 accumulated for the NCA mechanism).  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2545
2546 /* Utility routine for above code to make a field. */
2547
2548 static tree
2549 make_descriptor_field (const char *name, tree type,
2550 tree rec_type, tree initial)
2551 {
2552 tree field
2553 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2554
2555 DECL_INITIAL (field) = initial;
2556 return field;
2557 }
2558 \f
2559 /* Build a type to be used to represent an aliased object whose nominal
2560 type is an unconstrained array. This consists of a RECORD_TYPE containing
2561 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2562 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2563 is used to represent an arbitrary unconstrained object. Use NAME
2564 as the name of the record. */
2565
2566 tree
2567 build_unc_object_type (tree template_type, tree object_type, tree name)
2568 {
2569 tree type = make_node (RECORD_TYPE);
2570 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2571 template_type, type, 0, 0, 0, 1);
2572 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2573 type, 0, 0, 0, 1);
2574
2575 TYPE_NAME (type) = name;
2576 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2577 finish_record_type (type,
2578 chainon (chainon (NULL_TREE, template_field),
2579 array_field),
2580 0, 0);
2581
2582 return type;
2583 }
2584 \f
2585 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2586 the normal case this is just two adjustments, but we have more to do
2587 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2588
void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (ptr == 0 && ref == 0)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well. Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it. After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed. We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
     qualifiers in thoses cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      /* Repoint the pointer/reference types at the new designated type
	 and re-emit debug info for their named declarations.  */
      if (ptr != 0)
	TREE_TYPE (ptr) = new_type;
      TYPE_POINTER_TO (new_type) = ptr;

      if (ref != 0)
	TREE_TYPE (ref) = new_type;
      TYPE_REFERENCE_TO (new_type) = ref;

      if (ptr != 0 && TYPE_NAME (ptr) != 0
	  && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				  global_bindings_p (), 0);
      if (ref != 0 && TYPE_NAME (ref) != 0
	  && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				  global_bindings_p (), 0);
    }

  /* Now deal with the unconstrained array case. In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context. */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Graft the new type's fat-pointer fields onto the old fat
	 pointer so existing references see the new layout.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point every variant of the fat pointer back at the new
	 unconstrained array type.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record's size is the sum of its two fields' sizes.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2722 \f
2723 /* Convert a pointer to a constrained array into a pointer to a fat
2724 pointer. This involves making or finding a template. */
2725
static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      expr = save_expr (expr);
      /* If EXPR is an address, strip it to get at the record directly;
	 otherwise dereference the pointer.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The first field is the template, the second is the data.  */
      template = build_component_ref (expr, NULL_TREE, fields, 0);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), 0));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type. In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary. This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2788 \f
2789 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2790 is something that is a fat pointer, so convert to it first if it EXPR
2791 is not already a fat pointer. */
2792
2793 static tree
2794 convert_to_thin_pointer (tree type, tree expr)
2795 {
2796 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2797 expr
2798 = convert_to_fat_pointer
2799 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2800
2801 /* We get the pointer to the data and use a NOP_EXPR to make it the
2802 proper GCC type. */
2803 expr
2804 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2805 expr = build1 (NOP_EXPR, type, expr);
2806
2807 return expr;
2808 }
2809 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;
  /* If we're converting between two aggregate types that have the same main
     variant, just make a NOP_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (NOP_EXPR, type, expr);
  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  else if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  convert (type, TREE_OPERAND (expr, 0)),
		  TREE_OPERAND (expr, 1));

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (! TREE_CONSTANT (TYPE_SIZE (type))
	       || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    /* Deliberately empty: fall through to the unchecked conversion done
       for RECORD_TYPE at the bottom of this function.  */
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						   TYPE_FIELDS (etype), 0));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, 0);

      else
	return
	  gnat_build_constructor (type,
				  tree_cons (TYPE_FIELDS (type),
					     convert (TREE_TYPE
						      (TYPE_FIELDS (type)),
						      expr),
					     NULL_TREE));
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (GNAT_NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), 0));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  Likewise for a
	 CONSTRUCTOR.  But do not do so if converting from a fixed-size
	 type to a variable-size one.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoid unneeded conversions which makes reference computations
	 more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
	  && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
	  && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
	return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
		      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  Then fall through to the generic
	 handling below with the updated EXPR/ETYPE/ECODE.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, 0));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Strip a view conversion between aggregates (but not fat pointers)
	 and convert the underlying object directly.  */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* NOTE(review): CODE and ECODE are codes of *type* nodes, so the
     "code == INTEGER_CST" test below looks like it can never be true
     (INTEGER_CST is an expression code) -- confirm intent.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
	  && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  /* Finally, dispatch on the kind of the target type.  */
  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      /* A type carrying actual array bounds, or a biased type, needs
	 special treatment before the generic integer conversion.  */
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, 0);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, 0);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* Wrap a non-aggregate value into the single field of a
	 left-justified modular record.  */
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, 0);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  /* Also accept a field that is a left-justified modular or
	     padding record wrapping ETYPE; convert into the wrapper
	     first.  */
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3152 \f
3153 /* Remove all conversions that are done in EXP. This includes converting
3154 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3155 is nonzero, always return the address of the containing object even if
3156 the address is not bit-aligned. */
3157
3158 tree
3159 remove_conversions (tree exp, int true_address)
3160 {
3161 switch (TREE_CODE (exp))
3162 {
3163 case CONSTRUCTOR:
3164 if (true_address
3165 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3166 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3167 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3168 break;
3169
3170 case COMPONENT_REF:
3171 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3172 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3173 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3174 break;
3175
3176 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3177 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3178 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3179
3180 default:
3181 break;
3182 }
3183
3184 return exp;
3185 }
3186 \f
3187 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3188 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3189 likewise return an expression pointing to the underlying array. */
3190
3191 tree
3192 maybe_unconstrained_array (tree exp)
3193 {
3194 enum tree_code code = TREE_CODE (exp);
3195 tree new;
3196
3197 switch (TREE_CODE (TREE_TYPE (exp)))
3198 {
3199 case UNCONSTRAINED_ARRAY_TYPE:
3200 if (code == UNCONSTRAINED_ARRAY_REF)
3201 {
3202 new
3203 = build_unary_op (INDIRECT_REF, NULL_TREE,
3204 build_component_ref (TREE_OPERAND (exp, 0),
3205 get_identifier ("P_ARRAY"),
3206 NULL_TREE, 0));
3207 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3208 return new;
3209 }
3210
3211 else if (code == NULL_EXPR)
3212 return build1 (NULL_EXPR,
3213 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3214 (TREE_TYPE (TREE_TYPE (exp))))),
3215 TREE_OPERAND (exp, 0));
3216
3217 else if (code == WITH_RECORD_EXPR
3218 && (TREE_OPERAND (exp, 0)
3219 != (new = maybe_unconstrained_array
3220 (TREE_OPERAND (exp, 0)))))
3221 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3222 TREE_OPERAND (exp, 1));
3223
3224 case RECORD_TYPE:
3225 /* If this is a padded type, convert to the unpadded type and see if
3226 it contains a template. */
3227 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3228 {
3229 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3230 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3231 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3232 return
3233 build_component_ref (new, NULL_TREE,
3234 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3235 0);
3236 }
3237 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3238 return
3239 build_component_ref (exp, NULL_TREE,
3240 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3241 break;
3242
3243 default:
3244 break;
3245 }
3246
3247 return exp;
3248 }
3249 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is set, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, int notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  unchecked_convert (type, TREE_OPERAND (expr, 0), notrunc_p),
		  TREE_OPERAND (expr, 1));

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* Strip a biased representation from the input by going through an
	 unbiased copy of its type first.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      /* Likewise convert into an unbiased copy of a biased target, then
	 reapply the bias with a GNAT_NOP_EXPR below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      /* Here the record is built around the input, then the whole record
	 is unchecked-converted to the target type.  */
      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      /* Generic case: view the (possibly unwrapped) object as the target
	 type without changing any bits.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (! notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
    {
      /* Extend by shifting left then right by the number of excess bits;
	 the right shift is arithmetic or logical according to the
	 signedness of BASE_TYPE.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TREE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3414
3415 #include "gt-ada-utils.h"
3416 #include "gtype-ada.h"