/* Provenance: gcc.git — gcc/ada/utils.c (from the "[multiple changes]" commit view).  */
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41
42 #include "ada.h"
43 #include "types.h"
44 #include "atree.h"
45 #include "elists.h"
46 #include "namet.h"
47 #include "nlists.h"
48 #include "stringt.h"
49 #include "uintp.h"
50 #include "fe.h"
51 #include "sinfo.h"
52 #include "einfo.h"
53 #include "ada-tree.h"
54 #include "gigi.h"
55
56 #ifndef MAX_FIXED_MODE_SIZE
57 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
58 #endif
59
60 #ifndef MAX_BITS_PER_WORD
61 #define MAX_BITS_PER_WORD BITS_PER_WORD
62 #endif
63
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various standard types and decls we create; indexed
   by the ADT_* enumeration.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  Indexed by GNAT entity id minus
   First_Node_Id; NULL_TREE entries mean "no association yet".  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

struct e_stack GTY((chain_next ("%h.next"))) {
  /* Enclosing (outer) stack entry.  */
  struct e_stack *next;
  /* The elaboration list saved for that outer context.  */
  tree elab_list;
};
static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
103
/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour.  Contours include:

	the global one
	one for each subprogram definition
	one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct ada_binding_level GTY((chain_next ("%h.chain")))
{
  /* The binding level containing this one (the enclosing binding level).  */
  struct ada_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
};

/* The binding level currently in effect.  */
static GTY(()) struct ada_binding_level *current_binding_level;

/* A chain of ada_binding_level structures awaiting reuse.
   (GTY "deletable": the GC may discard this cache at collection time.)  */
static GTY((deletable)) struct ada_binding_level *free_binding_level;

/* A chain of unused BLOCK nodes, likewise a deletable cache.  */
static GTY((deletable)) tree free_block_chain;

/* Per-function language-specific data; Ada keeps none, but the struct must
   exist for the GCC function machinery.  */
struct language_function GTY(())
{
  int unused;
};

/* Forward declarations for the static helpers defined later in this file.  */
static tree merge_sizes (tree, tree, tree, int, int);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static int value_zerop (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *,tree, tree, tree);
static int value_factor_p (tree, int);
static int potential_alignment_gap (tree, tree, tree);
145 \f
146 /* Initialize the association of GNAT nodes to GCC trees. */
147
148 void
149 init_gnat_to_gnu (void)
150 {
151 associate_gnat_to_gnu
152 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
153
154 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
155 }
156
157 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
158 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
159 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
160
161 If GNU_DECL is zero, a previous association is to be reset. */
162
163 void
164 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
165 {
166 /* Check that GNAT_ENTITY is not already defined and that it is being set
167 to something which is a decl. Raise gigi 401 if not. Usually, this
168 means GNAT_ENTITY is defined twice, but occasionally is due to some
169 Gigi problem. */
170 if (gnu_decl
171 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
172 || (! no_check && ! DECL_P (gnu_decl))))
173 gigi_abort (401);
174
175 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
176 }
177
178 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
179 Return the ..._DECL node that was associated with it. If there is no tree
180 node associated with GNAT_ENTITY, abort.
181
182 In some cases, such as delayed elaboration or expressions that need to
183 be elaborated only once, GNAT_ENTITY is really not an entity. */
184
185 tree
186 get_gnu_tree (Entity_Id gnat_entity)
187 {
188 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
189 gigi_abort (402);
190
191 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
192 }
193
194 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
195
196 int
197 present_gnu_tree (Entity_Id gnat_entity)
198 {
199 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
200 }
201
202 \f
203 /* Return non-zero if we are currently in the global binding level. */
204
205 int
206 global_bindings_p (void)
207 {
208 return (force_global != 0 || current_binding_level->chain == 0 ? -1 : 0);
209 }
210
211 /* Return the list of declarations in the current level. Note that this list
212 is in reverse order (it has to be so for back-end compatibility). */
213
214 tree
215 getdecls (void)
216 {
217 return BLOCK_VARS (current_binding_level->block);
218 }
219
220 /* Enter a new binding level. */
221
222 void
223 gnat_pushlevel ()
224 {
225 struct ada_binding_level *newlevel = NULL;
226
227 /* Reuse a struct for this binding level, if there is one. */
228 if (free_binding_level)
229 {
230 newlevel = free_binding_level;
231 free_binding_level = free_binding_level->chain;
232 }
233 else
234 newlevel
235 = (struct ada_binding_level *)
236 ggc_alloc (sizeof (struct ada_binding_level));
237
238 /* Use a free BLOCK, if any; otherwise, allocate one. */
239 if (free_block_chain)
240 {
241 newlevel->block = free_block_chain;
242 free_block_chain = TREE_CHAIN (free_block_chain);
243 TREE_CHAIN (newlevel->block) = NULL_TREE;
244 }
245 else
246 newlevel->block = make_node (BLOCK);
247
248 /* Point the BLOCK we just made to its parent. */
249 if (current_binding_level)
250 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
251
252 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
253
254 /* Add this level to the front of the chain (stack) of levels that are
255 active. */
256 newlevel->chain = current_binding_level;
257 current_binding_level = newlevel;
258 }
259
260 /* Exit a binding level. Return the BLOCK node, if any. */
261
262 tree
263 gnat_poplevel ()
264 {
265 struct ada_binding_level *level = current_binding_level;
266 tree block = level->block;
267 tree decl;
268
269 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
270 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
271
272 /* Output any nested inline functions within this block which must be
273 compiled because their address is needed. */
274 for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
275 if (TREE_CODE (decl) == FUNCTION_DECL
276 && ! TREE_ASM_WRITTEN (decl) && TREE_ADDRESSABLE (decl)
277 && DECL_INITIAL (decl) != 0)
278 {
279 push_function_context ();
280 /* ??? This is temporary. */
281 ggc_push_context ();
282 output_inline_function (decl);
283 ggc_pop_context ();
284 pop_function_context ();
285 }
286
287 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
288 are no variables free the block and merge its subblocks into those of its
289 parent block. Otherwise, add it to the list of its parent. */
290 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
291 ;
292 else if (BLOCK_VARS (block) == NULL_TREE)
293 {
294 BLOCK_SUBBLOCKS (level->chain->block)
295 = chainon (BLOCK_SUBBLOCKS (block),
296 BLOCK_SUBBLOCKS (level->chain->block));
297 TREE_CHAIN (block) = free_block_chain;
298 free_block_chain = block;
299 block = NULL_TREE;
300 }
301 else
302 {
303 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
304 BLOCK_SUBBLOCKS (level->chain->block) = block;
305 TREE_USED (block) = 1;
306 }
307
308 /* Free this binding structure. */
309 current_binding_level = level->chain;
310 level->chain = free_binding_level;
311 free_binding_level = level;
312 return block;
313 }
314
315 /* Insert BLOCK at the end of the list of subblocks of the
316 current binding level. This is used when a BIND_EXPR is expanded,
317 to handle the BLOCK node inside the BIND_EXPR. */
318
319 void
320 insert_block (tree block)
321 {
322 TREE_USED (block) = 1;
323 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
324 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
325 }
326
327 /* Return nonzero if the current binding has any variables. This means
328 it will have a BLOCK node. */
329
330 int
331 block_has_vars ()
332 {
333 return BLOCK_VARS (current_binding_level->block) != 0;
334 }
335 \f
/* Records a ..._DECL node DECL as belonging to the current lexical scope.
   Returns the ..._DECL node.  DECL is prepended to the current BLOCK's
   variable list, and may become the name of its type (see below).  */

tree
pushdecl (tree decl)
{
  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of its function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    DECL_CONTEXT (decl) = 0;
  else
    DECL_CONTEXT (decl) = current_function_decl;

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later (in gnat_poplevel).

     Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list.  They
     will cause trouble with the debugger and aren't needed anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
      BLOCK_VARS (current_binding_level->block) = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
	  || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
	  || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
	      && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
	      && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  return decl;
}
381 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  Creates the global binding level, the common
   GCC tree nodes, and a few basic type decls ("integer", "unsigned char")
   that the GNAT types are stitched to later.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  gnat_pushlevel ();

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  /* NOTE(review): the decl is named "unsigned char" but is bound to
     char_type_node, whose signedness is target-dependent — presumably
     intentional for debug naming; confirm.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}
418
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.
   LONG_LONG_FLOAT_TYPE and EXCEPTION_TYPE come from the GNAT front end.
   Also declares the runtime support routines (allocation, exception
   handling, setjmp/longjmp) used by generated code.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
			    longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
			unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
			    void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.
     JMPBUF_T is an array of 6 Pmode-sized slots (index type 0..5).  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
     NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
     NULL_TREE,
     build_function_type (void_type_node,
			  tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
     NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_gnat_exception"),
     NULL_TREE,
     build_function_type (build_pointer_type (except_type_node), NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, 0, 1, 1, 0);

      /* Every raise reason maps to the single last-chance handler.  */
      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	/* 17 bytes: "__gnat_rcheck_" (14) + 2 digits + NUL.  NOTE(review):
	   assumes the reason-code count stays below 100 — confirm against
	   LAST_REASON_CODE.  */
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE,  jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
     address.  */
  update_setjmp_buf_decl
    = create_subprog_decl
      (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,  jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;

  main_identifier_node = get_identifier ("main");
}
617 \f
/* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
   nodes (FIELDLIST), finish constructing the record or union type.
   If HAS_REP is nonzero, this record has a rep clause; don't call
   layout_type but merely set the size and alignment ourselves.
   If DEFER_DEBUG is nonzero, do not call the debugging routines
   on this type; it will be done later.

   Side effects: sets the type's fields, stub decl, Ada size, GCC size,
   alignment and mode, and may emit a parallel "XVE"/"XVU" debug record
   for variable-sized types (see exp_dbug.ads).  */

void
finish_record_type (tree record_type,
                    tree fieldlist,
                    int has_rep,
                    int defer_debug)
{
  enum tree_code code = TREE_CODE (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  tree size_unit = size_zero_node;
  int var_size = 0;
  tree field;

  TYPE_FIELDS (record_type) = fieldlist;

  /* Reuse an existing TYPE_DECL as the stub decl if there is one;
     otherwise push a new one so debug output has a decl to hang on.  */
  if (TYPE_NAME (record_type) != 0
      && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
    TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
  else
    TYPE_STUB_DECL (record_type)
      = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
			      record_type));

  /* We don't need both the typedef name and the record name output in
     the debugging information, since they are the same.  */
  DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */

  if (has_rep)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      TYPE_MODE (record_type) = BLKmode;
      if (TYPE_SIZE (record_type) == 0)
	{
	  TYPE_SIZE (record_type) = bitsize_zero_node;
	  TYPE_SIZE_UNIT (record_type) = size_zero_node;
	}
      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      tree pos = bit_position (field);

      tree type = TREE_TYPE (field);
      tree this_size = DECL_SIZE (field);
      tree this_size_unit = DECL_SIZE_UNIT (field);
      tree this_ada_size = DECL_SIZE (field);

      /* We need to make an XVE/XVU record if any field has variable size,
	 whether or not the record does.  For example, if we have an union,
	 it may be that all fields, rounded up to the alignment, have the
	 same size, in which case we'll use that size.  But the debug
	 output routines (except Dwarf2) won't be able to output the fields,
	 so we need to make the special record.  */
      if (TREE_CODE (this_size) != INTEGER_CST)
	var_size = 1;

      /* For a nested record/union, prefer its Ada size over its GCC size.  */
      if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
	  || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! TYPE_IS_FAT_POINTER_P (type)
	  && ! TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type) != 0)
	this_ada_size = TYPE_ADA_SIZE (type);

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle:
	 byte-aligned fields whose size equals their type's size.  */
      if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
	  && value_factor_p (pos, BITS_PER_UNIT)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	DECL_BIT_FIELD (field) = 0;

      /* If we still have DECL_BIT_FIELD set at this point, we know the field
	 is technically not addressable.  Except that it can actually be
	 addressed if the field is BLKmode and happens to be properly
	 aligned.  */
      DECL_NONADDRESSABLE_P (field)
	|= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;

      if (has_rep && ! DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      /* Accumulate the three running sizes according to the record kind.  */
      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size));
	  size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size));
	  size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
				   this_size_unit, size_unit));
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
	  size = merge_sizes (size, pos, this_size,
			      TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
	  size_unit
	    = merge_sizes (size_unit, byte_position (field), this_size_unit,
			   TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
	  break;

	default:
	  abort ();
	}
    }

  /* Restore the original field order that we reversed above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  /* If this is a padding record, we never want to make the size smaller than
     what was specified in it, if any.  */
  if (TREE_CODE (record_type) == RECORD_TYPE
      && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
    {
      size = TYPE_SIZE (record_type);
      size_unit = TYPE_SIZE_UNIT (record_type);
    }

  /* Now set any of the values we've just computed that apply.  */
  if (! TYPE_IS_FAT_POINTER_P (record_type)
      && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
    SET_TYPE_ADA_SIZE (record_type, ada_size);

  if (has_rep)
    {
      if (! (TREE_CODE (record_type) == RECORD_TYPE
	     && TYPE_IS_PADDING_P (record_type)
	     && CONTAINS_PLACEHOLDER_P (size)))
	{
	  TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
	  TYPE_SIZE_UNIT (record_type)
	    = round_up (size_unit,
			TYPE_ALIGN (record_type) / BITS_PER_UNIT);
	}

      compute_record_mode (record_type);
    }

  if (! defer_debug)
    {
      /* If this record is of variable size, rename it so that the
	 debugger knows it is and make a new, parallel, record
	 that tells the debugger how the record is laid out.  See
	 exp_dbug.ads.  But don't do this for records that are padding
	 since they confuse GDB.  */
      if (var_size
	  && ! (TREE_CODE (record_type) == RECORD_TYPE
		&& TYPE_IS_PADDING_P (record_type)))
	{
	  tree new_record_type
	    = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
			 ? UNION_TYPE : TREE_CODE (record_type));
	  tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
	  /* The parallel type is named <orig>___XVU for variant unions
	     and <orig>___XVE otherwise, per the exp_dbug.ads encoding.  */
	  tree new_id
	    = concat_id_with_name (orig_id,
				   TREE_CODE (record_type) == QUAL_UNION_TYPE
				   ? "XVU" : "XVE");
	  tree last_pos = bitsize_zero_node;
	  tree old_field;
	  tree prev_old_field = 0;

	  TYPE_NAME (new_record_type) = new_id;
	  TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
	  TYPE_STUB_DECL (new_record_type)
	    = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
	  DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
	  DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
	    = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
	  /* NOTE(review): the debug record's size is set to the original's
	     alignment — presumably a placeholder the debugger ignores;
	     confirm against exp_dbug.ads.  */
	  TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));

	  /* Now scan all the fields, replacing each field with a new
	     field corresponding to the new encoding.  */
	  for (old_field = TYPE_FIELDS (record_type); old_field != 0;
	       old_field = TREE_CHAIN (old_field))
	    {
	      tree field_type = TREE_TYPE (old_field);
	      tree field_name = DECL_NAME (old_field);
	      tree new_field;
	      tree curpos = bit_position (old_field);
	      int var = 0;
	      unsigned int align = 0;
	      tree pos;

	      /* See how the position was modified from the last position.

		 There are two basic cases we support: a value was added
		 to the last position or the last position was rounded to
		 a boundary and they something was added.  Check for the
		 first case first.  If not, see if there is any evidence
		 of rounding.  If so, round the last position and try
		 again.

		 If this is a union, the position can be taken as zero.  */

	      if (TREE_CODE (new_record_type) == UNION_TYPE)
		pos = bitsize_zero_node, align = 0;
	      else
		pos = compute_related_constant (curpos, last_pos);

	      if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
		  && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
		{
		  align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
		  pos = compute_related_constant (curpos,
						  round_up (last_pos, align));
		}
	      else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
		       && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
		       && host_integerp (TREE_OPERAND
					 (TREE_OPERAND (curpos, 0), 1),
					 1))
		{
		  align
		    = tree_low_cst
		      (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
		  pos = compute_related_constant (curpos,
						  round_up (last_pos, align));
		}
	      else if (potential_alignment_gap (prev_old_field, old_field,
						pos))
		{
		  align = TYPE_ALIGN (field_type);
		  pos = compute_related_constant (curpos,
						  round_up (last_pos, align));
		}

	      /* If we can't compute a position, set it to zero.

		 ??? We really should abort here, but it's too much work
		 to get this correct for all cases.  */

	      if (pos == 0)
		pos = bitsize_zero_node;

	      /* See if this type is variable-size and make a new type
		 and indicate the indirection if so.  */
	      if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
		{
		  field_type = build_pointer_type (field_type);
		  var = 1;
		}

	      /* Make a new field name, if necessary.  The XV suffixes
		 (XVL / XVA<n> / XVL<n>) encode indirection and alignment
		 for the debugger; see exp_dbug.ads.  */
	      if (var || align != 0)
		{
		  char suffix[6];

		  if (align != 0)
		    sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
			     align / BITS_PER_UNIT);
		  else
		    strcpy (suffix, "XVL");

		  field_name = concat_id_with_name (field_name, suffix);
		}

	      new_field = create_field_decl (field_name, field_type,
					     new_record_type, 0,
					     DECL_SIZE (old_field), pos, 0);
	      TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
	      TYPE_FIELDS (new_record_type) = new_field;

	      /* If old_field is a QUAL_UNION_TYPE, take its size as being
		 zero.  The only time it's not the last field of the record
		 is when there are other components at fixed positions after
		 it (meaning there was a rep clause for every field) and we
		 want to be able to encode them.  */
	      last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
				     (TREE_CODE (TREE_TYPE (old_field))
				      == QUAL_UNION_TYPE)
				     ? bitsize_zero_node
				     : DECL_SIZE (old_field));
	      prev_old_field = old_field;
	    }

	  TYPE_FIELDS (new_record_type)
	    = nreverse (TYPE_FIELDS (new_record_type));

	  rest_of_type_compilation (new_record_type, global_bindings_p ());
	}

      rest_of_type_compilation (record_type, global_bindings_p ());
    }
}
953
954 /* Utility function of above to merge LAST_SIZE, the previous size of a record
955 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
956 if this represents a QUAL_UNION_TYPE in which case we must look for
957 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
958 is nonzero, we must take the MAX of the end position of this field
959 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
960
961 We return an expression for the size. */
962
963 static tree
964 merge_sizes (tree last_size,
965 tree first_bit,
966 tree size,
967 int special,
968 int has_rep)
969 {
970 tree type = TREE_TYPE (last_size);
971 tree new;
972
973 if (! special || TREE_CODE (size) != COND_EXPR)
974 {
975 new = size_binop (PLUS_EXPR, first_bit, size);
976 if (has_rep)
977 new = size_binop (MAX_EXPR, last_size, new);
978 }
979
980 else
981 new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
982 integer_zerop (TREE_OPERAND (size, 1))
983 ? last_size : merge_sizes (last_size, first_bit,
984 TREE_OPERAND (size, 1),
985 1, has_rep),
986 integer_zerop (TREE_OPERAND (size, 2))
987 ? last_size : merge_sizes (last_size, first_bit,
988 TREE_OPERAND (size, 2),
989 1, has_rep)));
990
991 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
992 when fed through substitute_in_expr) into thinking that a constant
993 size is not constant. */
994 while (TREE_CODE (new) == NON_LVALUE_EXPR)
995 new = TREE_OPERAND (new, 0);
996
997 return new;
998 }
999
1000 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1001 related by the addition of a constant. Return that constant if so. */
1002
1003 static tree
1004 compute_related_constant (tree op0, tree op1)
1005 {
1006 tree op0_var, op1_var;
1007 tree op0_con = split_plus (op0, &op0_var);
1008 tree op1_con = split_plus (op1, &op1_var);
1009 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1010
1011 if (operand_equal_p (op0_var, op1_var, 0))
1012 return result;
1013 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1014 return result;
1015 else
1016 return 0;
1017 }
1018
1019 /* Utility function of above to split a tree OP which may be a sum, into a
1020 constant part, which is returned, and a variable part, which is stored
1021 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1022 bitsizetype. */
1023
1024 static tree
1025 split_plus (tree in, tree *pvar)
1026 {
1027 /* Strip NOPS in order to ease the tree traversal and maximize the
1028 potential for constant or plus/minus discovery. We need to be careful
1029 to always return and set *pvar to bitsizetype trees, but it's worth
1030 the effort. */
1031 STRIP_NOPS (in);
1032
1033 *pvar = convert (bitsizetype, in);
1034
1035 if (TREE_CODE (in) == INTEGER_CST)
1036 {
1037 *pvar = bitsize_zero_node;
1038 return convert (bitsizetype, in);
1039 }
1040 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1041 {
1042 tree lhs_var, rhs_var;
1043 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1044 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1045
1046 if (lhs_var == TREE_OPERAND (in, 0)
1047 && rhs_var == TREE_OPERAND (in, 1))
1048 return bitsize_zero_node;
1049
1050 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1051 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1052 }
1053 else
1054 return bitsize_zero_node;
1055 }
1056 \f
1057 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1058 subprogram. If it is void_type_node, then we are dealing with a procedure,
1059 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1060 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1061 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1062 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1063 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1064 RETURNS_WITH_DSP is nonzero if the function is to return with a
1065 depressed stack pointer. */
1066
1067 tree
1068 create_subprog_type (tree return_type,
1069 tree param_decl_list,
1070 tree cico_list,
1071 int returns_unconstrained,
1072 int returns_by_ref,
1073 int returns_with_dsp)
1074 {
1075 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1076 the subprogram formal parameters. This list is generated by traversing the
1077 input list of PARM_DECL nodes. */
1078 tree param_type_list = NULL;
1079 tree param_decl;
1080 tree type;
1081
1082 for (param_decl = param_decl_list; param_decl;
1083 param_decl = TREE_CHAIN (param_decl))
1084 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1085 param_type_list);
1086
1087 /* The list of the function parameter types has to be terminated by the void
1088 type to signal to the back-end that we are not dealing with a variable
1089 parameter subprogram, but that the subprogram has a fixed number of
1090 parameters. */
1091 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1092
1093 /* The list of argument types has been created in reverse
1094 so nreverse it. */
1095 param_type_list = nreverse (param_type_list);
1096
1097 type = build_function_type (return_type, param_type_list);
1098
1099 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1100 or the new type should, make a copy of TYPE. Likewise for
1101 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1102 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1103 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1104 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1105 type = copy_type (type);
1106
1107 SET_TYPE_CI_CO_LIST (type, cico_list);
1108 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1109 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1110 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1111 return type;
1112 }
1113 \f
1114 /* Return a copy of TYPE but safe to modify in any way. */
1115
1116 tree
1117 copy_type (tree type)
1118 {
1119 tree new = copy_node (type);
1120
1121 /* copy_node clears this field instead of copying it, because it is
1122 aliased with TREE_CHAIN. */
1123 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1124
1125 TYPE_POINTER_TO (new) = 0;
1126 TYPE_REFERENCE_TO (new) = 0;
1127 TYPE_MAIN_VARIANT (new) = new;
1128 TYPE_NEXT_VARIANT (new) = 0;
1129
1130 return new;
1131 }
1132 \f
1133 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1134 TYPE_INDEX_TYPE is INDEX. */
1135
1136 tree
1137 create_index_type (tree min, tree max, tree index)
1138 {
1139 /* First build a type for the desired range. */
1140 tree type = build_index_2_type (min, max);
1141
1142 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1143 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1144 is set, but not to INDEX, make a copy of this type with the requested
1145 index type. Note that we have no way of sharing these types, but that's
1146 only a small hole. */
1147 if (TYPE_INDEX_TYPE (type) == index)
1148 return type;
1149 else if (TYPE_INDEX_TYPE (type) != 0)
1150 type = copy_type (type);
1151
1152 SET_TYPE_INDEX_TYPE (type, index);
1153 return type;
1154 }
1155 \f
1156 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1157 string) and TYPE is a ..._TYPE node giving its data type.
1158 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1159 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1160 information about this type. */
1161
1162 tree
1163 create_type_decl (tree type_name,
1164 tree type,
1165 struct attrib *attr_list,
1166 int artificial_p,
1167 int debug_info_p)
1168 {
1169 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1170 enum tree_code code = TREE_CODE (type);
1171
1172 DECL_ARTIFICIAL (type_decl) = artificial_p;
1173 pushdecl (type_decl);
1174 process_attributes (type_decl, attr_list);
1175
1176 /* Pass type declaration information to the debugger unless this is an
1177 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1178 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1179 a dummy type, which will be completed later, or a type for which
1180 debugging information was not requested. */
1181 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1182 || ! debug_info_p)
1183 DECL_IGNORED_P (type_decl) = 1;
1184 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1185 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1186 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1187 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1188
1189 return type_decl;
1190 }
1191
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   ATTR_LIST is the list of attributes to attach to the declaration.  */

tree
create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
                 int const_flag, int public_flag, int extern_flag,
                 int static_flag, struct attrib *attr_list)
{
  /* Nonzero when VAR_INIT is a valid constant initializer in this context:
     its type must match TYPE, and at global/static level it must satisfy
     initializer_constant_valid_p; otherwise TREE_CONSTANT suffices.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
          && (global_bindings_p () || static_flag
              ? 0 != initializer_constant_valid_p (var_init,
                                                   TREE_TYPE (var_init))
              : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
                   /* Only make a CONST_DECL for sufficiently-small objects.
                      We consider complex double "sufficiently-small".  */
                   && TYPE_SIZE (type) != 0
                   && host_integerp (TYPE_SIZE_UNIT (type), 1)
                   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
                                             GET_MODE_SIZE (DCmode)))
                  ? CONST_DECL : VAR_DECL, var_name, type);

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* A non-constant initializer of a global is deferred to the elaboration
     routine; the decl itself then carries no DECL_INITIAL.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  else if (var_init != 0
           && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
                != TYPE_MAIN_VARIANT (type))
               || (static_flag && ! init_const)))
    DECL_INIT_BY_ASSIGN_P (var_decl) = 1;

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top
     level we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);

  /* A volatile decl must be addressable (it may be accessed via MEM).  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  return var_decl;
}
1286 \f
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name,
                   tree field_type,
                   tree record_type,
                   int packed,
                   tree size,
                   tree pos,
                   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      /* NOTE: rm_size is the Ada RM-defined size of the type, which may
         be smaller than its GCC TYPE_SIZE.  */
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
         byte.  */
      if (TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && ! addressable
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
          || (pos != 0
              && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
                                            bitsize_int (TYPE_ALIGN
                                                         (field_type)))))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* Without packing or an explicit position, cap the alignment at the
         record's alignment, if the record has one.  */
      if (! packed && pos == 0)
        DECL_ALIGN (field_decl)
          = (TYPE_ALIGN (record_type) != 0
             ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
             : TYPE_ALIGN (field_type));
    }

  /* A positioned field is "packed" exactly when it is a bitfield;
     otherwise it inherits the record's packedness.  */
  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
           DECL_BIT_FIELD (field_decl) ? 1
           : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
           : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      /* Split POS into the byte offset and the bit offset within that
         byte-aligned position.  */
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = ! addressable;

  return field_decl;
}
1424
1425 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1426 effects, has the value of zero. */
1427
1428 static int
1429 value_zerop (tree exp)
1430 {
1431 if (TREE_CODE (exp) == COMPOUND_EXPR)
1432 return value_zerop (TREE_OPERAND (exp, 1));
1433
1434 return integer_zerop (exp);
1435 }
1436 \f
1437 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1438 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1439 readonly (either an IN parameter or an address of a pass-by-ref
1440 parameter). */
1441
1442 tree
1443 create_param_decl (tree param_name, tree param_type, int readonly)
1444 {
1445 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1446
1447 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1448 lead to various ABI violations. */
1449 if (targetm.calls.promote_prototypes (param_type)
1450 && (TREE_CODE (param_type) == INTEGER_TYPE
1451 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1452 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1453 {
1454 /* We have to be careful about biased types here. Make a subtype
1455 of integer_type_node with the proper biasing. */
1456 if (TREE_CODE (param_type) == INTEGER_TYPE
1457 && TYPE_BIASED_REPRESENTATION_P (param_type))
1458 {
1459 param_type
1460 = copy_type (build_range_type (integer_type_node,
1461 TYPE_MIN_VALUE (param_type),
1462 TYPE_MAX_VALUE (param_type)));
1463
1464 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1465 }
1466 else
1467 param_type = integer_type_node;
1468 }
1469
1470 DECL_ARG_TYPE (param_decl) = param_type;
1471 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1472 TREE_READONLY (param_decl) = readonly;
1473 return param_decl;
1474 }
1475 \f
1476 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1477
1478 void
1479 process_attributes (tree decl, struct attrib *attr_list)
1480 {
1481 for (; attr_list; attr_list = attr_list->next)
1482 switch (attr_list->type)
1483 {
1484 case ATTR_MACHINE_ATTRIBUTE:
1485 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1486 NULL_TREE),
1487 ATTR_FLAG_TYPE_IN_PLACE);
1488 break;
1489
1490 case ATTR_LINK_ALIAS:
1491 TREE_STATIC (decl) = 1;
1492 assemble_alias (decl, attr_list->name);
1493 break;
1494
1495 case ATTR_WEAK_EXTERNAL:
1496 if (SUPPORTS_WEAK)
1497 declare_weak (decl);
1498 else
1499 post_error ("?weak declarations not supported on this target",
1500 attr_list->error_point);
1501 break;
1502
1503 case ATTR_LINK_SECTION:
1504 if (targetm.have_named_sections)
1505 {
1506 DECL_SECTION_NAME (decl)
1507 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1508 IDENTIFIER_POINTER (attr_list->name));
1509 }
1510 else
1511 post_error ("?section attributes are not supported for this target",
1512 attr_list->error_point);
1513 break;
1514 }
1515 }
1516 \f
1517 /* Add some pending elaborations on the list. */
1518
1519 void
1520 add_pending_elaborations (tree var_decl, tree var_init)
1521 {
1522 if (var_init != 0)
1523 Check_Elaboration_Code_Allowed (error_gnat_node);
1524
1525 pending_elaborations
1526 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1527 }
1528
1529 /* Obtain any pending elaborations and clear the old list. */
1530
1531 tree
1532 get_pending_elaborations (void)
1533 {
1534 /* Each thing added to the list went on the end; we want it on the
1535 beginning. */
1536 tree result = TREE_CHAIN (pending_elaborations);
1537
1538 TREE_CHAIN (pending_elaborations) = 0;
1539 return result;
1540 }
1541
1542 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1543 of 2. */
1544
1545 static int
1546 value_factor_p (tree value, int factor)
1547 {
1548 if (host_integerp (value, 1))
1549 return tree_low_cst (value, 1) % factor == 0;
1550
1551 if (TREE_CODE (value) == MULT_EXPR)
1552 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1553 || value_factor_p (TREE_OPERAND (value, 1), factor));
1554
1555 return 0;
1556 }
1557
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static int
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return 0;

  /* If the previous field is a union type, then return False: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return 0;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap exactly when that
     constant is nonzero.  */
  if (offset && host_integerp (offset, 1))
    return (!integer_zerop (offset));

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
             + tree_low_cst (DECL_SIZE (prev_field), 1))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return 0;

  /* Fallback: conservatively assume there may be a gap.  */
  return 1;
}
1603
1604 /* Return nonzero if there are pending elaborations. */
1605
1606 int
1607 pending_elaborations_p (void)
1608 {
1609 return TREE_CHAIN (pending_elaborations) != 0;
1610 }
1611
1612 /* Save a copy of the current pending elaboration list and make a new
1613 one. */
1614
1615 void
1616 push_pending_elaborations (void)
1617 {
1618 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1619
1620 p->next = elist_stack;
1621 p->elab_list = pending_elaborations;
1622 elist_stack = p;
1623 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1624 }
1625
1626 /* Pop the stack of pending elaborations. */
1627
1628 void
1629 pop_pending_elaborations (void)
1630 {
1631 struct e_stack *p = elist_stack;
1632
1633 pending_elaborations = p->elab_list;
1634 elist_stack = p->next;
1635 }
1636
1637 /* Return the current position in pending_elaborations so we can insert
1638 elaborations after that point. */
1639
1640 tree
1641 get_elaboration_location (void)
1642 {
1643 return tree_last (pending_elaborations);
1644 }
1645
1646 /* Insert the current elaborations after ELAB, which is in some elaboration
1647 list. */
1648
1649 void
1650 insert_elaboration_list (tree elab)
1651 {
1652 tree next = TREE_CHAIN (elab);
1653
1654 if (TREE_CHAIN (pending_elaborations))
1655 {
1656 TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
1657 TREE_CHAIN (tree_last (pending_elaborations)) = next;
1658 TREE_CHAIN (pending_elaborations) = 0;
1659 }
1660 }
1661
1662 /* Returns a LABEL_DECL node for LABEL_NAME. */
1663
1664 tree
1665 create_label_decl (tree label_name)
1666 {
1667 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1668
1669 DECL_CONTEXT (label_decl) = current_function_decl;
1670 DECL_MODE (label_decl) = VOIDmode;
1671 DECL_SOURCE_LOCATION (label_decl) = input_location;
1672
1673 return label_decl;
1674 }
1675 \f
1676 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1677 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1678 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1679 PARM_DECL nodes chained through the TREE_CHAIN field).
1680
1681 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1682 appropriate fields in the FUNCTION_DECL. */
1683
1684 tree
1685 create_subprog_decl (tree subprog_name,
1686 tree asm_name,
1687 tree subprog_type,
1688 tree param_decl_list,
1689 int inline_flag,
1690 int public_flag,
1691 int extern_flag,
1692 struct attrib *attr_list)
1693 {
1694 tree return_type = TREE_TYPE (subprog_type);
1695 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1696
1697 /* If this is a function nested inside an inlined external function, it
1698 means we aren't going to compile the outer function unless it is
1699 actually inlined, so do the same for us. */
1700 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1701 && DECL_EXTERNAL (current_function_decl))
1702 extern_flag = 1;
1703
1704 DECL_EXTERNAL (subprog_decl) = extern_flag;
1705 TREE_PUBLIC (subprog_decl) = public_flag;
1706 DECL_INLINE (subprog_decl) = inline_flag;
1707 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1708 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1709 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1710 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1711 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1712
1713 if (asm_name != 0)
1714 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1715
1716 process_attributes (subprog_decl, attr_list);
1717
1718 /* Add this decl to the current binding level. */
1719 subprog_decl = pushdecl (subprog_decl);
1720
1721 /* Output the assembler code and/or RTL for the declaration. */
1722 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1723
1724 return subprog_decl;
1725 }
1726 \f
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  */

static int function_nesting_depth;

/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  /* When entering a nested function, save the enclosing function's
     compilation state first.  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below with the adequate
     BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  current_function_decl = subprog_decl;
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = TREE_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  /* Initialize code generation for the body.  */
  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
1774
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  Must pair with a preceding call to
   begin_subprog_body; restores any enclosing function's context.  */

void
end_subprog_body (void)
{
  tree decl;
  tree cico_list;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = current_function_decl;
  DECL_INITIAL (current_function_decl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end ();

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  /* If we're only annotating types, don't actually compile this
     function.  */
  if (!type_annotate_only)
    {
      rest_of_compilation (current_function_decl);
      /* Unless output was deferred (e.g. for inlining), the per-function
	 data is no longer needed; release it.  */
      if (! DECL_DEFER_OUTPUT (current_function_decl))
	{
	  free_after_compilation (cfun);
	  DECL_STRUCT_FUNCTION (current_function_decl) = 0;
	}
      cfun = 0;
    }

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway.  */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  if (DECL_STRUCT_FUNCTION (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory.  */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value.  */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it.  */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1856 \f
1857 /* Return a definition for a builtin function named NAME and whose data type
1858 is TYPE. TYPE should be a function type with argument types.
1859 FUNCTION_CODE tells later passes how to compile calls to this function.
1860 See tree.h for its possible values.
1861
1862 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1863 the name to be called if we can't opencode the function. If
1864 ATTRS is nonzero, use that for the function attribute list. */
1865
1866 tree
1867 builtin_function (const char *name,
1868 tree type,
1869 int function_code,
1870 enum built_in_class class,
1871 const char *library_name,
1872 tree attrs)
1873 {
1874 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1875
1876 DECL_EXTERNAL (decl) = 1;
1877 TREE_PUBLIC (decl) = 1;
1878 if (library_name)
1879 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1880
1881 pushdecl (decl);
1882 DECL_BUILT_IN_CLASS (decl) = class;
1883 DECL_FUNCTION_CODE (decl) = function_code;
1884 if (attrs)
1885 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1886 return decl;
1887 }
1888
1889 /* Return an integer type with the number of bits of precision given by
1890 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1891 it is a signed type. */
1892
1893 tree
1894 gnat_type_for_size (unsigned precision, int unsignedp)
1895 {
1896 tree t;
1897 char type_name[20];
1898
1899 if (precision <= 2 * MAX_BITS_PER_WORD
1900 && signed_and_unsigned_types[precision][unsignedp] != 0)
1901 return signed_and_unsigned_types[precision][unsignedp];
1902
1903 if (unsignedp)
1904 t = make_unsigned_type (precision);
1905 else
1906 t = make_signed_type (precision);
1907
1908 if (precision <= 2 * MAX_BITS_PER_WORD)
1909 signed_and_unsigned_types[precision][unsignedp] = t;
1910
1911 if (TYPE_NAME (t) == 0)
1912 {
1913 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1914 TYPE_NAME (t) = get_identifier (type_name);
1915 }
1916
1917 return t;
1918 }
1919
1920 /* Likewise for floating-point types. */
1921
1922 static tree
1923 float_type_for_precision (int precision, enum machine_mode mode)
1924 {
1925 tree t;
1926 char type_name[20];
1927
1928 if (float_types[(int) mode] != 0)
1929 return float_types[(int) mode];
1930
1931 float_types[(int) mode] = t = make_node (REAL_TYPE);
1932 TYPE_PRECISION (t) = precision;
1933 layout_type (t);
1934
1935 if (TYPE_MODE (t) != mode)
1936 gigi_abort (414);
1937
1938 if (TYPE_NAME (t) == 0)
1939 {
1940 sprintf (type_name, "FLOAT_%d", precision);
1941 TYPE_NAME (t) = get_identifier (type_name);
1942 }
1943
1944 return t;
1945 }
1946
1947 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1948 an unsigned type; otherwise a signed type is returned. */
1949
1950 tree
1951 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1952 {
1953 if (mode == BLKmode)
1954 return NULL_TREE;
1955 else if (mode == VOIDmode)
1956 return void_type_node;
1957 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1958 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
1959 else
1960 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1961 }
1962
1963 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1964
1965 tree
1966 gnat_unsigned_type (tree type_node)
1967 {
1968 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1969
1970 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1971 {
1972 type = copy_node (type);
1973 TREE_TYPE (type) = type_node;
1974 }
1975 else if (TREE_TYPE (type_node) != 0
1976 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1977 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1978 {
1979 type = copy_node (type);
1980 TREE_TYPE (type) = TREE_TYPE (type_node);
1981 }
1982
1983 return type;
1984 }
1985
1986 /* Return the signed version of a TYPE_NODE, a scalar type. */
1987
1988 tree
1989 gnat_signed_type (tree type_node)
1990 {
1991 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
1992
1993 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1994 {
1995 type = copy_node (type);
1996 TREE_TYPE (type) = type_node;
1997 }
1998 else if (TREE_TYPE (type_node) != 0
1999 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2000 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2001 {
2002 type = copy_node (type);
2003 TREE_TYPE (type) = TREE_TYPE (type_node);
2004 }
2005
2006 return type;
2007 }
2008
2009 /* Return a type the same as TYPE except unsigned or signed according to
2010 UNSIGNEDP. */
2011
2012 tree
2013 gnat_signed_or_unsigned_type (int unsignedp, tree type)
2014 {
2015 if (! INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
2016 return type;
2017 else
2018 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2019 }
2020 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.

   This walks EXP recursively, flipping MAX_P across negating operations
   (NEGATE_EXPR, the RHS of MINUS_EXPR) so that the extreme of the whole
   expression is computed correctly.  */

tree
max_size (tree exp, int max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    /* Declarations and constants have no discriminant references;
       return them unchanged.  */
    case 'd':
    case 'c':
      return exp;

    case 'x':
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (! CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Replace the discriminant reference by the extreme value of the
	 type of the referenced field.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    /* A comparison yields 0 or 1, so its extremes are known.  */
    case '<':
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    /* The value of a COMPOUND_EXPR is its second operand.  */
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    /* Recurse into both operands; the RHS of a subtraction is
	       negated, so its MAX_P sense is inverted.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Either arm might be selected, so take the extreme of the
	       two arms rather than trying to evaluate the condition.  */
	    return fold (build (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p), NULL);
	}
    }

  /* Any tree code not handled above is unexpected here.  */
  gigi_abort (408);
}
2117 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.

   The template's fields come in (low bound, high bound) pairs, one pair
   per array dimension.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip any padding or left-justified-modular wrapper to get at the
     underlying array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     FIELD advances two at a time since the template fields are
     min/max pairs; BOUND_LIST (or, failing that, ARRAY_TYPE) advances
     one dimension per iteration in step with it.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* Conses are built in reverse; nreverse below restores field
	 order (min then max for each dimension).  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2180 \f
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.

   NOTE(review): the numeric DTYPE and CLASS codes below appear to follow
   the OpenVMS descriptor encoding (DSC$B_DTYPE / DSC$B_CLASS) — confirm
   against the OpenVMS Calling Standard before changing any of them.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Gather the index types for each dimension.  For Fortran-convention
     arrays (except NCA descriptors) the dimensions are stored in
     reverse order.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("POINTER",
		build_pointer_type_for_mode (type, SImode, false), record_type,
		build1 (ADDR_EXPR,
			build_pointer_type_for_mode (type, SImode, false),
			build (PLACEHOLDER_EXPR, type))));

  /* Add the mechanism-specific fields after the common prefix.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0",
		    build_pointer_type_for_mode (inner_type, SImode, false),
		    record_type,
		    build1 (ADDR_EXPR,
			    build_pointer_type_for_mode (inner_type, SImode,
							 false),
			    tem)));

      /* Next come the addressing coefficients.  Each coefficient is the
	 running product of the lengths of the inner dimensions.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  /* NCA descriptors use "S<i>" stride fields; A descriptors use
	     "M<i>" multiplier fields.  */
	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2474
2475 /* Utility routine for above code to make a field. */
2476
2477 static tree
2478 make_descriptor_field (const char *name, tree type,
2479 tree rec_type, tree initial)
2480 {
2481 tree field
2482 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2483
2484 DECL_INITIAL (field) = initial;
2485 return field;
2486 }
2487 \f
2488 /* Build a type to be used to represent an aliased object whose nominal
2489 type is an unconstrained array. This consists of a RECORD_TYPE containing
2490 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2491 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2492 is used to represent an arbitrary unconstrained object. Use NAME
2493 as the name of the record. */
2494
2495 tree
2496 build_unc_object_type (tree template_type, tree object_type, tree name)
2497 {
2498 tree type = make_node (RECORD_TYPE);
2499 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2500 template_type, type, 0, 0, 0, 1);
2501 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2502 type, 0, 0, 0, 1);
2503
2504 TYPE_NAME (type) = name;
2505 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2506 finish_record_type (type,
2507 chainon (chainon (NULL_TREE, template_field),
2508 array_field),
2509 0, 0);
2510
2511 return type;
2512 }
2513 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (ptr == 0 && ref == 0)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it.  After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed.  We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type).  We have to merge the
     qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case: retarget every pointer and
     reference type in the chains at the new designated type.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	{
	  TREE_TYPE (ptr) = new_type;

	  if (TYPE_NAME (ptr) != 0
	      && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	      && TREE_CODE (new_type) != ENUMERAL_TYPE)
	    rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				      global_bindings_p (), 0);
	}

      for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
	{
	  TREE_TYPE (ref) = new_type;

	  if (TYPE_NAME (ref) != 0
	      && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	      && TREE_CODE (new_type) != ENUMERAL_TYPE)
	    rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				      global_bindings_p (), 0);
	}
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Graft the new fat pointer's fields onto the old fat-pointer
	 record so existing references pick up the new layout.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The object record's size is the sum of its two fields' sizes.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2657 \f
/* Convert a pointer to a constrained array into a pointer to a fat
   pointer.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record
     it points to, which holds the template followed by the data.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Save EXPR since it is evaluated twice below (once for the
	 template, once for the data address).  */
      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      template = build_component_ref (expr, NULL_TREE, fields, 0);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), 0));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2723 \f
2724 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2725 is something that is a fat pointer, so convert to it first if it EXPR
2726 is not already a fat pointer. */
2727
2728 static tree
2729 convert_to_thin_pointer (tree type, tree expr)
2730 {
2731 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2732 expr
2733 = convert_to_fat_pointer
2734 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2735
2736 /* We get the pointer to the data and use a NOP_EXPR to make it the
2737 proper GCC type. */
2738 expr
2739 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2740 expr = build1 (NOP_EXPR, type, expr);
2741
2742 return expr;
2743 }
2744 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;
  /* If we're converting between two aggregate types that have the same main
     variant, just make a NOP_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (NOP_EXPR, type, expr);

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion: the branch body
     is deliberately empty so control falls past this chain and reaches the
     unchecked_convert call in the RECORD_TYPE/ARRAY_TYPE case below.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (! TREE_CONSTANT (TYPE_SIZE (type))
	       || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						   TYPE_FIELDS (etype), 0));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, 0);

      else
	return
	  gnat_build_constructor (type,
				  tree_cons (TYPE_FIELDS (type),
					     convert (TREE_TYPE
						      (TYPE_FIELDS (type)),
						      expr),
					     NULL_TREE));
    }

  /* If the input is a biased type, adjust first by adding the bias
     (TYPE_MIN_VALUE) to the raw representation.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (GNAT_NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), 0));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  Likewise for
	 CONSTRUCTOR if the alias sets are the same.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	  && (TREE_CODE (expr) == STRING_CST
	      || get_alias_set (etype) == get_alias_set (type)))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoids unneeded conversions which make reference computations
	 more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
	  && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
	  && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0)
	  && get_alias_set (type) == get_alias_set (etype))
	return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
		      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, 0));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
	  && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  /* Dispatch on the target type.  */
  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, 0);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Subtract the bias so the stored value is relative to
	   TYPE_MIN_VALUE.  */
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type,
							    byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, 0);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, 0);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3084 \f
3085 /* Remove all conversions that are done in EXP. This includes converting
3086 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3087 is nonzero, always return the address of the containing object even if
3088 the address is not bit-aligned. */
3089
3090 tree
3091 remove_conversions (tree exp, int true_address)
3092 {
3093 switch (TREE_CODE (exp))
3094 {
3095 case CONSTRUCTOR:
3096 if (true_address
3097 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3098 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3099 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3100 break;
3101
3102 case COMPONENT_REF:
3103 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3104 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3105 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3106 break;
3107
3108 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3109 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3110 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3111
3112 default:
3113 break;
3114 }
3115
3116 return exp;
3117 }
3118 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the P_ARRAY pointer of the fat pointer underlying
	     the unconstrained array reference, and carry over the
	     read-only flag of the reference.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, 0));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR with the type of the underlying array,
	   obtained through the fat pointer's array-pointer field.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): for any other expression of unconstrained array type,
	 control falls through into the RECORD_TYPE case below -- there is
	 no `break' here.  Confirm this is intentional; the RECORD_TYPE
	 tests below would then be applied to an UNCONSTRAINED_ARRAY_TYPE
	 node.  */

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    /* The array is the second field, after the template.  */
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  /* Not an unconstrained array or template record: return EXP unchanged.  */
  return exp;
}
3174 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is set, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, int notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* A biased input is first viewed as its unbiased base type;
	 GNAT_NOP_EXPR marks the view change without reinterpreting
	 the bits.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      /* Likewise convert to an unbiased copy of a biased target type,
	 then view the result as the biased type at the end.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size: wrap the input in a single-field record and convert that.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      /* General case: reinterpret the bits via a VIEW_CONVERT_EXPR,
	 first stripping any unconstrained array wrapper.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (! notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Shift the value left to the top of the machine word and back down
	 again; the right shift re-extends it with TYPE's signedness.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type,
							      expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			    OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3333
3334 #include "gt-ada-utils.h"
3335 #include "gtype-ada.h"