1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41 #include "cgraph.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
45
46 #include "ada.h"
47 #include "types.h"
48 #include "atree.h"
49 #include "elists.h"
50 #include "namet.h"
51 #include "nlists.h"
52 #include "stringt.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
62 #endif
63
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 #endif
67
68 /* If nonzero, pretend we are allocating at global level. */
69 int force_global;
70
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
73
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
76
77 /* Associates a GNAT tree node to a GCC tree node. It is used in
78 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
79 of `save_gnu_tree' for more info. */
80 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
81
 82 /* This variable keeps a table of types for each precision so that we only
83 allocate each of them once. Signed and unsigned types are kept separate.
84
85 Note that these types are only used when fold-const requests something
86 special. Perhaps we should NOT share these types; we'll see how it
87 goes later. */
88 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
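/* Illustrative note (an assumption, not stated in this section): the first
   dimension is presumably indexed by precision and the second by signedness,
   as filled in by gnat_type_for_size.  */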
89
90 /* Likewise for float types, but record these by mode. */
91 static GTY(()) tree float_types[NUM_MACHINE_MODES];
92
93 /* For each binding contour we allocate a binding_level structure to indicate
94 the binding depth. */
95
96 struct gnat_binding_level GTY((chain_next ("%h.chain")))
97 {
98 /* The binding level containing this one (the enclosing binding level). */
99 struct gnat_binding_level *chain;
100 /* The BLOCK node for this level. */
101 tree block;
102 /* If nonzero, the setjmp buffer that needs to be updated for any
103 variable-sized definition within this context. */
104 tree jmpbuf_decl;
105 };
106
107 /* The binding level currently in effect. */
108 static GTY(()) struct gnat_binding_level *current_binding_level;
109
110 /* A chain of gnat_binding_level structures awaiting reuse. */
111 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
112
113 /* A chain of unused BLOCK nodes. */
114 static GTY((deletable)) tree free_block_chain;
115
116 struct language_function GTY(())
117 {
118 int unused;
119 };
120
121 static void gnat_define_builtin (const char *, tree, int, const char *, bool);
122 static void gnat_install_builtins (void);
123 static tree merge_sizes (tree, tree, tree, bool, bool);
124 static tree compute_related_constant (tree, tree);
125 static tree split_plus (tree, tree *);
126 static bool value_zerop (tree);
127 static void gnat_gimplify_function (tree);
128 static void gnat_finalize (tree);
129 static tree float_type_for_precision (int, enum machine_mode);
130 static tree convert_to_fat_pointer (tree, tree);
131 static tree convert_to_thin_pointer (tree, tree);
132 static tree make_descriptor_field (const char *,tree, tree, tree);
133 static bool value_factor_p (tree, HOST_WIDE_INT);
134 static bool potential_alignment_gap (tree, tree, tree);
135 \f
136 /* Initialize the association of GNAT nodes to GCC trees. */
137
138 void
139 init_gnat_to_gnu (void)
140 {
141 associate_gnat_to_gnu
142 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
143 }
144
145 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
 146    which is to be associated with GNAT_ENTITY. Such a GCC tree node is always
147 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
148
149 If GNU_DECL is zero, a previous association is to be reset. */
150
151 void
152 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
153 {
154 /* Check that GNAT_ENTITY is not already defined and that it is being set
155 to something which is a decl. Raise gigi 401 if not. Usually, this
156 means GNAT_ENTITY is defined twice, but occasionally is due to some
157 Gigi problem. */
158 if (gnu_decl
159 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
160 || (! no_check && ! DECL_P (gnu_decl))))
161 gigi_abort (401);
162
163 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
164 }
165
166 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
167 Return the ..._DECL node that was associated with it. If there is no tree
168 node associated with GNAT_ENTITY, abort.
169
170 In some cases, such as delayed elaboration or expressions that need to
171 be elaborated only once, GNAT_ENTITY is really not an entity. */
172
173 tree
174 get_gnu_tree (Entity_Id gnat_entity)
175 {
176 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
177 gigi_abort (402);
178
179 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
180 }
181
182 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
183
184 int
185 present_gnu_tree (Entity_Id gnat_entity)
186 {
187 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
188 }
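/* A minimal usage sketch of the three routines above (hypothetical caller,
   not part of this file): associate a decl with an entity exactly once and
   retrieve it later.

     if (!present_gnu_tree (gnat_entity))
       save_gnu_tree (gnat_entity, gnu_decl, 0);
     ...
     gnu_decl = get_gnu_tree (gnat_entity);
*/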
189
190 \f
191 /* Return non-zero if we are currently in the global binding level. */
192
193 int
194 global_bindings_p (void)
195 {
196 return (force_global != 0 || current_binding_level == 0
197 || current_binding_level->chain == 0 ? -1 : 0);
198 }
199
200 /* Enter a new binding level. */
201
202 void
203 gnat_pushlevel ()
204 {
205 struct gnat_binding_level *newlevel = NULL;
206
207 /* Reuse a struct for this binding level, if there is one. */
208 if (free_binding_level)
209 {
210 newlevel = free_binding_level;
211 free_binding_level = free_binding_level->chain;
212 }
213 else
214 newlevel
215 = (struct gnat_binding_level *)
216 ggc_alloc (sizeof (struct gnat_binding_level));
217
218 /* Use a free BLOCK, if any; otherwise, allocate one. */
219 if (free_block_chain)
220 {
221 newlevel->block = free_block_chain;
222 free_block_chain = TREE_CHAIN (free_block_chain);
223 TREE_CHAIN (newlevel->block) = NULL_TREE;
224 }
225 else
226 newlevel->block = make_node (BLOCK);
227
228 /* Point the BLOCK we just made to its parent. */
229 if (current_binding_level)
230 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
231
232 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
233
234 /* Add this level to the front of the chain (stack) of levels that are
235 active. */
236 newlevel->chain = current_binding_level;
237 newlevel->jmpbuf_decl = NULL_TREE;
238 current_binding_level = newlevel;
239 }
240
241 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
242 and point FNDECL to this BLOCK. */
243
244 void
245 set_current_block_context (tree fndecl)
246 {
247 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
248 DECL_INITIAL (fndecl) = current_binding_level->block;
249 }
250
251 /* Set the jmpbuf_decl for the current binding level to DECL. */
252
253 void
254 set_block_jmpbuf_decl (tree decl)
255 {
256 current_binding_level->jmpbuf_decl = decl;
257 }
258
259 /* Get the jmpbuf_decl, if any, for the current binding level. */
260
261 tree
262 get_block_jmpbuf_decl ()
263 {
264 return current_binding_level->jmpbuf_decl;
265 }
266
267 /* Exit a binding level. Set any BLOCK into the current code group. */
268
269 void
270 gnat_poplevel ()
271 {
272 struct gnat_binding_level *level = current_binding_level;
273 tree block = level->block;
274
275 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
276 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
277
 278   /* If this is a function-level BLOCK, don't do anything.  Otherwise, if there
 279      are no variables, free the block and merge its subblocks into those of its
280 parent block. Otherwise, add it to the list of its parent. */
281 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
282 ;
283 else if (BLOCK_VARS (block) == NULL_TREE)
284 {
285 BLOCK_SUBBLOCKS (level->chain->block)
286 = chainon (BLOCK_SUBBLOCKS (block),
287 BLOCK_SUBBLOCKS (level->chain->block));
288 TREE_CHAIN (block) = free_block_chain;
289 free_block_chain = block;
290 }
291 else
292 {
293 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
294 BLOCK_SUBBLOCKS (level->chain->block) = block;
295 TREE_USED (block) = 1;
296 set_block_for_group (block);
297 }
298
299 /* Free this binding structure. */
300 current_binding_level = level->chain;
301 level->chain = free_binding_level;
302 free_binding_level = level;
303 }
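/* Typical pairing of gnat_pushlevel/gnat_poplevel (hypothetical sketch, not
   part of this file): each declarative region pushes a level, registers its
   declarations, then pops the level, which files the BLOCK under its parent
   or recycles it.

     gnat_pushlevel ();
     gnat_pushdecl (decl, gnat_node);
     ...
     gnat_poplevel ();
*/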
304
305 /* Insert BLOCK at the end of the list of subblocks of the
306 current binding level. This is used when a BIND_EXPR is expanded,
307 to handle the BLOCK node inside the BIND_EXPR. */
308
309 void
310 insert_block (tree block)
311 {
312 TREE_USED (block) = 1;
313 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
314 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
315 }
316 \f
317 /* Records a ..._DECL node DECL as belonging to the current lexical scope
318 and uses GNAT_NODE for location information. */
319
320 void
321 gnat_pushdecl (tree decl, Node_Id gnat_node)
322 {
323 /* If at top level, there is no context. But PARM_DECLs always go in the
 324      level of their function.  */
325 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
326 DECL_CONTEXT (decl) = 0;
327 else
328 DECL_CONTEXT (decl) = current_function_decl;
329
330 /* Set the location of DECL and emit a declaration for it. */
331 if (Present (gnat_node))
332 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
333 add_decl_expr (decl, gnat_node);
334
335 /* Put the declaration on the list. The list of declarations is in reverse
336 order. The list will be reversed later. We don't do this for global
337 variables. Also, don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
338 the list. They will cause trouble with the debugger and aren't needed
339 anyway. */
340 if (!global_bindings_p ()
341 && (TREE_CODE (decl) != TYPE_DECL
342 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE))
343 {
344 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
345 BLOCK_VARS (current_binding_level->block) = decl;
346 }
347
348 /* For the declaration of a type, set its name if it either is not already
349 set, was set to an IDENTIFIER_NODE, indicating an internal name,
350 or if the previous type name was not derived from a source name.
351 We'd rather have the type named with a real name and all the pointer
 352      types to the same object have the same POINTER_TYPE node.  The
 353      corresponding code in c-decl.c makes a copy of the type node here, but
 354      that may cause us trouble with incomplete types, so let's not try it
 355      (at least for now).  */
356
357 if (TREE_CODE (decl) == TYPE_DECL
358 && DECL_NAME (decl) != 0
359 && (TYPE_NAME (TREE_TYPE (decl)) == 0
360 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
361 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
362 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
363 && ! DECL_ARTIFICIAL (decl))))
364 TYPE_NAME (TREE_TYPE (decl)) = decl;
365
366 if (TREE_CODE (decl) != CONST_DECL)
367 rest_of_decl_compilation (decl, NULL, global_bindings_p (), 0);
368 }
369 \f
370 /* Do little here. Set up the standard declarations later after the
371 front end has been run. */
372
373 void
374 gnat_init_decl_processing (void)
375 {
376 input_line = 0;
377
378 /* Make the binding_level structure for global names. */
379 current_function_decl = 0;
380 current_binding_level = 0;
381 free_binding_level = 0;
382 gnat_pushlevel ();
383
384 build_common_tree_nodes (0);
385
386 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
387 corresponding to the size of Pmode. In most cases when ptr_mode and
388 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
 389      far better code using the width of Pmode.  Do this here since we need
 390      it before we can expand the GNAT types.  */
391 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
392 set_sizetype (size_type_node);
393 build_common_tree_nodes_2 (0);
394
395 /* Give names and make TYPE_DECLs for common types. */
396 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype),
397 Empty);
398 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
399 integer_type_node),
400 Empty);
401 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
402 char_type_node),
403 Empty);
404 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("long integer"),
405 long_integer_type_node),
406 Empty);
407 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
408 void_type_node),
409 Empty);
410
411 ptr_void_type_node = build_pointer_type (void_type_node);
412
413 gnat_install_builtins ();
414 }
415
416 /* Define a builtin function. This is temporary and is just being done
417 to initialize implicit_built_in_decls for the middle-end. We'll want
418 to do full builtin processing soon. */
419
420 static void
421 gnat_define_builtin (const char *name, tree type,
422 int function_code, const char *library_name, bool const_p)
423 {
424 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
425
426 DECL_EXTERNAL (decl) = 1;
427 TREE_PUBLIC (decl) = 1;
428 if (library_name)
429 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
430 make_decl_rtl (decl, NULL);
431 gnat_pushdecl (decl, Empty);
432 DECL_BUILT_IN_CLASS (decl) = BUILT_IN_NORMAL;
433 DECL_FUNCTION_CODE (decl) = function_code;
434 TREE_READONLY (decl) = const_p;
435
436 implicit_built_in_decls[function_code] = decl;
437 }
438
439 /* Install the builtin functions the middle-end needs. */
440
441 static void
442 gnat_install_builtins ()
443 {
444 tree ftype;
445 tree tmp;
446
447 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
448 tmp = tree_cons (NULL_TREE, long_integer_type_node, tmp);
449 ftype = build_function_type (long_integer_type_node, tmp);
450 gnat_define_builtin ("__builtin_expect", ftype, BUILT_IN_EXPECT,
451 "__builtin_expect", true);
452
453 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
454 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
455 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
456 ftype = build_function_type (ptr_void_type_node, tmp);
457 gnat_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
458 "memcpy", false);
459
460 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
461 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
462 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
463 ftype = build_function_type (integer_type_node, tmp);
464 gnat_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
465 "memcmp", false);
466
467 tmp = tree_cons (NULL_TREE, integer_type_node, void_list_node);
468 ftype = build_function_type (integer_type_node, tmp);
469 gnat_define_builtin ("__builtin_clz", ftype, BUILT_IN_CLZ, "clz", true);
470
471 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
472 ftype = build_function_type (integer_type_node, tmp);
473 gnat_define_builtin ("__builtin_clzl", ftype, BUILT_IN_CLZL, "clzl", true);
474
475 tmp = tree_cons (NULL_TREE, long_long_integer_type_node, void_list_node);
476 ftype = build_function_type (integer_type_node, tmp);
477 gnat_define_builtin ("__builtin_clzll", ftype, BUILT_IN_CLZLL, "clzll",
478 true);
479
480 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
481 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
482 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
483 ftype = build_function_type (void_type_node, tmp);
484 gnat_define_builtin ("__builtin_init_trampoline", ftype,
485 BUILT_IN_INIT_TRAMPOLINE, "init_trampoline", false);
486
487 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
488 ftype = build_function_type (ptr_void_type_node, tmp);
489 gnat_define_builtin ("__builtin_adjust_trampoline", ftype,
490 BUILT_IN_ADJUST_TRAMPOLINE, "adjust_trampoline", true);
491
492 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
493 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
494 ftype = build_function_type (ptr_void_type_node, tmp);
495 gnat_define_builtin ("__builtin_stack_alloc", ftype, BUILT_IN_STACK_ALLOC,
496 "stack_alloc", false);
497
498 /* The stack_save and stack_restore builtins aren't used directly. They
499 are inserted during gimplification to implement stack_alloc calls. */
500 ftype = build_function_type (ptr_void_type_node, void_list_node);
501 gnat_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
502 "stack_save", false);
503 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
504 ftype = build_function_type (void_type_node, tmp);
505 gnat_define_builtin ("__builtin_stack_restore", ftype,
506 BUILT_IN_STACK_RESTORE, "stack_restore", false);
507 }
508
509 /* Create the predefined scalar types such as `integer_type_node' needed
510 in the gcc back-end and initialize the global binding level. */
511
512 void
513 init_gigi_decls (tree long_long_float_type, tree exception_type)
514 {
515 tree endlink, decl;
516 unsigned int i;
517
518 /* Set the types that GCC and Gigi use from the front end. We would like
519 to do this for char_type_node, but it needs to correspond to the C
520 char type. */
521 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
522 {
523 /* In this case, the builtin floating point types are VAX float,
524 so make up a type for use. */
525 longest_float_type_node = make_node (REAL_TYPE);
526 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
527 layout_type (longest_float_type_node);
528 create_type_decl (get_identifier ("longest float type"),
529 longest_float_type_node, NULL, 0, 1, Empty);
530 }
531 else
532 longest_float_type_node = TREE_TYPE (long_long_float_type);
533
534 except_type_node = TREE_TYPE (exception_type);
535
536 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
537 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
538 NULL, 0, 1, Empty);
539
540 void_type_decl_node = create_type_decl (get_identifier ("void"),
541 void_type_node, NULL, 0, 1, Empty);
542
543 void_ftype = build_function_type (void_type_node, NULL_TREE);
544 ptr_void_ftype = build_pointer_type (void_ftype);
545
546 /* Now declare runtime functions. */
547 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
548
549 /* malloc is a function declaration tree for a function to allocate
550 memory. */
551 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
552 NULL_TREE,
553 build_function_type (ptr_void_type_node,
554 tree_cons (NULL_TREE,
555 sizetype,
556 endlink)),
557 NULL_TREE, 0, 1, 1, 0, Empty);
558
559 /* free is a function declaration tree for a function to free memory. */
560 free_decl
561 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
562 build_function_type (void_type_node,
563 tree_cons (NULL_TREE,
564 ptr_void_type_node,
565 endlink)),
566 NULL_TREE, 0, 1, 1, 0, Empty);
567
568 /* Make the types and functions used for exception processing. */
569 jmpbuf_type
570 = build_array_type (gnat_type_for_mode (Pmode, 0),
571 build_index_type (build_int_2 (5, 0)));
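  /* That is, an array of six Pmode-sized words: build_index_type with a
     maximum of 5 yields the index range 0 .. 5.  */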
572 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
573 0, 1, Empty);
574 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
575
576 /* Functions to get and set the jumpbuf pointer for the current thread. */
577 get_jmpbuf_decl
578 = create_subprog_decl
579 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
580 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
581 NULL_TREE, 0, 1, 1, 0, Empty);
582
583 set_jmpbuf_decl
584 = create_subprog_decl
585 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
586 NULL_TREE,
587 build_function_type (void_type_node,
588 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
589 NULL_TREE, 0, 1, 1, 0, Empty);
590
591 /* Function to get the current exception. */
592 get_excptr_decl
593 = create_subprog_decl
594 (get_identifier ("system__soft_links__get_gnat_exception"),
595 NULL_TREE,
596 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
597 NULL_TREE, 0, 1, 1, 0, Empty);
598
599 /* Functions that raise exceptions. */
600 raise_nodefer_decl
601 = create_subprog_decl
602 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
603 build_function_type (void_type_node,
604 tree_cons (NULL_TREE,
605 build_pointer_type (except_type_node),
606 endlink)),
607 NULL_TREE, 0, 1, 1, 0, Empty);
608
609 /* Hooks to call when entering/leaving an exception handler. */
610 begin_handler_decl
611 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
612 build_function_type (void_type_node,
613 tree_cons (NULL_TREE,
614 ptr_void_type_node,
615 endlink)),
616 NULL_TREE, 0, 1, 1, 0, Empty);
617
618 end_handler_decl
619 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
620 build_function_type (void_type_node,
621 tree_cons (NULL_TREE,
622 ptr_void_type_node,
623 endlink)),
624 NULL_TREE, 0, 1, 1, 0, Empty);
625
626 /* If in no exception handlers mode, all raise statements are redirected to
627 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
628 this procedure will never be called in this mode. */
629 if (No_Exception_Handlers_Set ())
630 {
631 decl
632 = create_subprog_decl
633 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
634 build_function_type (void_type_node,
635 tree_cons (NULL_TREE,
636 build_pointer_type (char_type_node),
637 tree_cons (NULL_TREE,
638 integer_type_node,
639 endlink))),
640 NULL_TREE, 0, 1, 1, 0, Empty);
641
642 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
643 gnat_raise_decls[i] = decl;
644 }
645 else
646 /* Otherwise, make one decl for each exception reason. */
647 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
648 {
649 char name[17];
650
651 sprintf (name, "__gnat_rcheck_%.2d", i);
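        /* This yields "__gnat_rcheck_00", "__gnat_rcheck_01", and so on,
           one runtime routine per exception reason code.  */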
652 gnat_raise_decls[i]
653 = create_subprog_decl
654 (get_identifier (name), NULL_TREE,
655 build_function_type (void_type_node,
656 tree_cons (NULL_TREE,
657 build_pointer_type
658 (char_type_node),
659 tree_cons (NULL_TREE,
660 integer_type_node,
661 endlink))),
662 NULL_TREE, 0, 1, 1, 0, Empty);
663 }
664
665 /* Indicate that these never return. */
666 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
667 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
668 TREE_TYPE (raise_nodefer_decl)
669 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
670 TYPE_QUAL_VOLATILE);
671
672 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
673 {
674 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
675 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
676 TREE_TYPE (gnat_raise_decls[i])
677 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
678 TYPE_QUAL_VOLATILE);
679 }
680
681 /* setjmp returns an integer and has one operand, which is a pointer to
682 a jmpbuf. */
683 setjmp_decl
684 = create_subprog_decl
685 (get_identifier ("__builtin_setjmp"), NULL_TREE,
686 build_function_type (integer_type_node,
687 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
688 NULL_TREE, 0, 1, 1, 0, Empty);
689
690 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
691 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
692
693 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
694 address. */
695 update_setjmp_buf_decl
696 = create_subprog_decl
697 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
698 build_function_type (void_type_node,
699 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
700 NULL_TREE, 0, 1, 1, 0, Empty);
701
702 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
703 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
704
705 main_identifier_node = get_identifier ("main");
706 }
707 \f
708 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL nodes
709 (FIELDLIST), finish constructing the record or union type. If HAS_REP is
710 nonzero, this record has a rep clause; don't call layout_type but merely set
711 the size and alignment ourselves. If DEFER_DEBUG is nonzero, do not call
712 the debugging routines on this type; it will be done later. */
713
714 void
715 finish_record_type (tree record_type, tree fieldlist, int has_rep,
716 int defer_debug)
717 {
718 enum tree_code code = TREE_CODE (record_type);
719 tree ada_size = bitsize_zero_node;
720 tree size = bitsize_zero_node;
721 tree size_unit = size_zero_node;
722 int var_size = 0;
723 tree field;
724
725 TYPE_FIELDS (record_type) = fieldlist;
726 TYPE_STUB_DECL (record_type)
727 = build_decl (TYPE_DECL, NULL_TREE, record_type);
728
729 /* We don't need both the typedef name and the record name output in
730 the debugging information, since they are the same. */
731 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
732
733 /* Globally initialize the record first. If this is a rep'ed record,
 734      that just means some initializations; otherwise, lay out the record.  */
735
736 if (has_rep)
737 {
738 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
739 TYPE_MODE (record_type) = BLKmode;
740 if (TYPE_SIZE (record_type) == 0)
741 {
742 TYPE_SIZE (record_type) = bitsize_zero_node;
743 TYPE_SIZE_UNIT (record_type) = size_zero_node;
744 }
745 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
746 out just like a UNION_TYPE, since the size will be fixed. */
747 else if (code == QUAL_UNION_TYPE)
748 code = UNION_TYPE;
749 }
750 else
751 {
 752       /* Ensure there isn't a size already set.  There can be one in an error
753 case where there is a rep clause but all fields have errors and
754 no longer have a position. */
755 TYPE_SIZE (record_type) = 0;
756 layout_type (record_type);
757 }
758
759 /* At this point, the position and size of each field is known. It was
760 either set before entry by a rep clause, or by laying out the type above.
761
762 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
763 to compute the Ada size; the GCC size and alignment (for rep'ed records
764 that are not padding types); and the mode (for rep'ed records). We also
765 clear the DECL_BIT_FIELD indication for the cases we know have not been
766 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
767
768 if (code == QUAL_UNION_TYPE)
769 fieldlist = nreverse (fieldlist);
770
771 for (field = fieldlist; field; field = TREE_CHAIN (field))
772 {
773 tree pos = bit_position (field);
774
775 tree type = TREE_TYPE (field);
776 tree this_size = DECL_SIZE (field);
777 tree this_size_unit = DECL_SIZE_UNIT (field);
778 tree this_ada_size = DECL_SIZE (field);
779
780 /* We need to make an XVE/XVU record if any field has variable size,
 781          whether or not the record does.  For example, if we have a union,
782 it may be that all fields, rounded up to the alignment, have the
783 same size, in which case we'll use that size. But the debug
784 output routines (except Dwarf2) won't be able to output the fields,
785 so we need to make the special record. */
786 if (TREE_CODE (this_size) != INTEGER_CST)
787 var_size = 1;
788
789 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
790 || TREE_CODE (type) == QUAL_UNION_TYPE)
791 && ! TYPE_IS_FAT_POINTER_P (type)
792 && ! TYPE_CONTAINS_TEMPLATE_P (type)
793 && TYPE_ADA_SIZE (type) != 0)
794 this_ada_size = TYPE_ADA_SIZE (type);
795
796 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
797 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
798 && value_factor_p (pos, BITS_PER_UNIT)
799 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
800 DECL_BIT_FIELD (field) = 0;
801
802 /* If we still have DECL_BIT_FIELD set at this point, we know the field
803 is technically not addressable. Except that it can actually be
804 addressed if the field is BLKmode and happens to be properly
805 aligned. */
806 DECL_NONADDRESSABLE_P (field)
807 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
808
809 if (has_rep && ! DECL_BIT_FIELD (field))
810 TYPE_ALIGN (record_type)
811 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
812
813 switch (code)
814 {
815 case UNION_TYPE:
816 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
817 size = size_binop (MAX_EXPR, size, this_size);
818 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
819 break;
820
821 case QUAL_UNION_TYPE:
822 ada_size
823 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
824 this_ada_size, ada_size));
825 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
826 this_size, size));
827 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
828 this_size_unit, size_unit));
829 break;
830
831 case RECORD_TYPE:
832 /* Since we know here that all fields are sorted in order of
833 increasing bit position, the size of the record is one
834 higher than the ending bit of the last field processed
835 unless we have a rep clause, since in that case we might
836 have a field outside a QUAL_UNION_TYPE that has a higher ending
837 position. So use a MAX in that case. Also, if this field is a
838 QUAL_UNION_TYPE, we need to take into account the previous size in
839 the case of empty variants. */
840 ada_size
841 = merge_sizes (ada_size, pos, this_ada_size,
842 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
843 size = merge_sizes (size, pos, this_size,
844 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
845 size_unit
846 = merge_sizes (size_unit, byte_position (field), this_size_unit,
847 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
848 break;
849
850 default:
851 abort ();
852 }
853 }
854
855 if (code == QUAL_UNION_TYPE)
856 nreverse (fieldlist);
857
858 /* If this is a padding record, we never want to make the size smaller than
859 what was specified in it, if any. */
860 if (TREE_CODE (record_type) == RECORD_TYPE
861 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
862 {
863 size = TYPE_SIZE (record_type);
864 size_unit = TYPE_SIZE_UNIT (record_type);
865 }
866
867 /* Now set any of the values we've just computed that apply. */
868 if (! TYPE_IS_FAT_POINTER_P (record_type)
869 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
870 SET_TYPE_ADA_SIZE (record_type, ada_size);
871
872 if (has_rep)
873 {
874 if (! (TREE_CODE (record_type) == RECORD_TYPE
875 && TYPE_IS_PADDING_P (record_type)
876 && CONTAINS_PLACEHOLDER_P (size)))
877 {
878 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
879 TYPE_SIZE_UNIT (record_type)
880 = round_up (size_unit,
881 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
882 }
883
884 compute_record_mode (record_type);
885 }
886
887 if (! defer_debug)
888 {
889 /* If this record is of variable size, rename it so that the
890 debugger knows it is and make a new, parallel, record
891 that tells the debugger how the record is laid out. See
892 exp_dbug.ads. But don't do this for records that are padding
893 since they confuse GDB. */
894 if (var_size
895 && ! (TREE_CODE (record_type) == RECORD_TYPE
896 && TYPE_IS_PADDING_P (record_type)))
897 {
898 tree new_record_type
899 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
900 ? UNION_TYPE : TREE_CODE (record_type));
901 tree orig_name = TYPE_NAME (record_type);
902 tree orig_id
903 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
904 : orig_name);
905 tree new_id
906 = concat_id_with_name (orig_id,
907 TREE_CODE (record_type) == QUAL_UNION_TYPE
908 ? "XVU" : "XVE");
909 tree last_pos = bitsize_zero_node;
910 tree old_field;
911 tree prev_old_field = 0;
912
913 TYPE_NAME (new_record_type) = new_id;
914 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
915 TYPE_STUB_DECL (new_record_type)
916 = build_decl (TYPE_DECL, NULL_TREE, new_record_type);
917 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
918 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
919 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
920 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
921
922 /* Now scan all the fields, replacing each field with a new
923 field corresponding to the new encoding. */
924 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
925 old_field = TREE_CHAIN (old_field))
926 {
927 tree field_type = TREE_TYPE (old_field);
928 tree field_name = DECL_NAME (old_field);
929 tree new_field;
930 tree curpos = bit_position (old_field);
931 int var = 0;
932 unsigned int align = 0;
933 tree pos;
934
935 /* See how the position was modified from the last position.
936
937 There are two basic cases we support: a value was added
938 to the last position or the last position was rounded to
 939             a boundary and then something was added.  Check for the
940 first case first. If not, see if there is any evidence
941 of rounding. If so, round the last position and try
942 again.
943
944 If this is a union, the position can be taken as zero. */
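              /* Illustration: if CURPOS is LAST_POS + 16, POS is simply 16;
                 if CURPOS has the form X * 64 (possibly plus a constant), we
                 take ALIGN to be 64 and compute POS relative to LAST_POS
                 rounded up to that boundary.  */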
945
946 if (TREE_CODE (new_record_type) == UNION_TYPE)
947 pos = bitsize_zero_node, align = 0;
948 else
949 pos = compute_related_constant (curpos, last_pos);
950
951 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
952 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
953 {
954 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
955 pos = compute_related_constant (curpos,
956 round_up (last_pos, align));
957 }
958 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
959 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
960 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
961 && host_integerp (TREE_OPERAND
962 (TREE_OPERAND (curpos, 0), 1),
963 1))
964 {
965 align
966 = tree_low_cst
967 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
968 pos = compute_related_constant (curpos,
969 round_up (last_pos, align));
970 }
971 else if (potential_alignment_gap (prev_old_field, old_field,
972 pos))
973 {
974 align = TYPE_ALIGN (field_type);
975 pos = compute_related_constant (curpos,
976 round_up (last_pos, align));
977 }
978
979 /* If we can't compute a position, set it to zero.
980
981 ??? We really should abort here, but it's too much work
982 to get this correct for all cases. */
983
984 if (pos == 0)
985 pos = bitsize_zero_node;
986
987 /* See if this type is variable-size and make a new type
988 and indicate the indirection if so. */
989 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
990 {
991 field_type = build_pointer_type (field_type);
992 var = 1;
993 }
994
995 /* Make a new field name, if necessary. */
996 if (var || align != 0)
997 {
 998              char suffix[16];
999
1000 if (align != 0)
1001 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1002 align / BITS_PER_UNIT);
1003 else
1004 strcpy (suffix, "XVL");
1005
1006 field_name = concat_id_with_name (field_name, suffix);
1007 }
1008
1009 new_field = create_field_decl (field_name, field_type,
1010 new_record_type, 0,
1011 DECL_SIZE (old_field), pos, 0);
1012 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1013 TYPE_FIELDS (new_record_type) = new_field;
1014
1015 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1016 zero. The only time it's not the last field of the record
1017 is when there are other components at fixed positions after
1018 it (meaning there was a rep clause for every field) and we
1019 want to be able to encode them. */
1020 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1021 (TREE_CODE (TREE_TYPE (old_field))
1022 == QUAL_UNION_TYPE)
1023 ? bitsize_zero_node
1024 : DECL_SIZE (old_field));
1025 prev_old_field = old_field;
1026 }
1027
1028 TYPE_FIELDS (new_record_type)
1029 = nreverse (TYPE_FIELDS (new_record_type));
1030
1031 rest_of_type_compilation (new_record_type, global_bindings_p ());
1032 }
1033
1034 rest_of_type_compilation (record_type, global_bindings_p ());
1035 }
1036 }
1037
1038 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1039 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1040 if this represents a QUAL_UNION_TYPE in which case we must look for
1041 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1042 is nonzero, we must take the MAX of the end position of this field
1043 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1044
1045 We return an expression for the size. */
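/* For instance (illustrative values): with FIRST_BIT = 32 and SIZE = 16 and
   no rep clause, the result is 48 bits; with HAS_REP, it is
   MAX (LAST_SIZE, 48) instead.  */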
1046
1047 static tree
1048 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1049 bool has_rep)
1050 {
1051 tree type = TREE_TYPE (last_size);
1052 tree new;
1053
1054 if (! special || TREE_CODE (size) != COND_EXPR)
1055 {
1056 new = size_binop (PLUS_EXPR, first_bit, size);
1057 if (has_rep)
1058 new = size_binop (MAX_EXPR, last_size, new);
1059 }
1060
1061 else
1062 new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
1063 integer_zerop (TREE_OPERAND (size, 1))
1064 ? last_size : merge_sizes (last_size, first_bit,
1065 TREE_OPERAND (size, 1),
1066 1, has_rep),
1067 integer_zerop (TREE_OPERAND (size, 2))
1068 ? last_size : merge_sizes (last_size, first_bit,
1069 TREE_OPERAND (size, 2),
1070 1, has_rep)));
1071
 1072   /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
1073 when fed through substitute_in_expr) into thinking that a constant
1074 size is not constant. */
1075 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1076 new = TREE_OPERAND (new, 0);
1077
1078 return new;
1079 }
1080
1081 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1082 related by the addition of a constant. Return that constant if so. */
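/* For instance, if OP0 is N + 12 and OP1 is N + 4, the result is 8; if the
   variable parts cannot be matched, the function returns 0 instead.  */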
1083
1084 static tree
1085 compute_related_constant (tree op0, tree op1)
1086 {
1087 tree op0_var, op1_var;
1088 tree op0_con = split_plus (op0, &op0_var);
1089 tree op1_con = split_plus (op1, &op1_var);
1090 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1091
1092 if (operand_equal_p (op0_var, op1_var, 0))
1093 return result;
1094 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1095 return result;
1096 else
1097 return 0;
1098 }
1099
1100 /* Utility function of above to split a tree OP which may be a sum, into a
1101 constant part, which is returned, and a variable part, which is stored
1102 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1103 bitsizetype. */
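/* For instance, applied to N + 8 this returns 8 and sets *PVAR to N; applied
   to a lone INTEGER_CST it returns that constant and sets *PVAR to
   bitsize_zero_node.  */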
1104
1105 static tree
1106 split_plus (tree in, tree *pvar)
1107 {
1108 /* Strip NOPS in order to ease the tree traversal and maximize the
1109 potential for constant or plus/minus discovery. We need to be careful
1110 to always return and set *pvar to bitsizetype trees, but it's worth
1111 the effort. */
1112 STRIP_NOPS (in);
1113
1114 *pvar = convert (bitsizetype, in);
1115
1116 if (TREE_CODE (in) == INTEGER_CST)
1117 {
1118 *pvar = bitsize_zero_node;
1119 return convert (bitsizetype, in);
1120 }
1121 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1122 {
1123 tree lhs_var, rhs_var;
1124 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1125 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1126
1127 if (lhs_var == TREE_OPERAND (in, 0)
1128 && rhs_var == TREE_OPERAND (in, 1))
1129 return bitsize_zero_node;
1130
1131 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1132 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1133 }
1134 else
1135 return bitsize_zero_node;
1136 }
1137 \f
1138 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1139 subprogram. If it is void_type_node, then we are dealing with a procedure,
1140 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1141 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1142 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1143 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1144 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1145 RETURNS_WITH_DSP is nonzero if the function is to return with a
1146 depressed stack pointer. */
1147 tree
1148 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1149 int returns_unconstrained, int returns_by_ref,
1150 int returns_with_dsp)
1151 {
1152 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1153 the subprogram formal parameters. This list is generated by traversing the
1154 input list of PARM_DECL nodes. */
1155 tree param_type_list = NULL;
1156 tree param_decl;
1157 tree type;
1158
1159 for (param_decl = param_decl_list; param_decl;
1160 param_decl = TREE_CHAIN (param_decl))
1161 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1162 param_type_list);
1163
1164 /* The list of the function parameter types has to be terminated by the void
1165 type to signal to the back-end that we are not dealing with a variable
1166 parameter subprogram, but that the subprogram has a fixed number of
1167 parameters. */
1168 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1169
1170 /* The list of argument types has been created in reverse
1171 so nreverse it. */
1172 param_type_list = nreverse (param_type_list);
1173
1174 type = build_function_type (return_type, param_type_list);
1175
 1176   /* TYPE may have been shared since GCC hashes types.  If it already has a
 1177      CICO_LIST or the new type should have one, make a copy of TYPE.  Likewise for
1178 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1179 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1180 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1181 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1182 type = copy_type (type);
1183
1184 SET_TYPE_CI_CO_LIST (type, cico_list);
1185 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1186 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1187 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1188 return type;
1189 }
1190 \f
1191 /* Return a copy of TYPE but safe to modify in any way. */
1192
1193 tree
1194 copy_type (tree type)
1195 {
1196 tree new = copy_node (type);
1197
1198 /* copy_node clears this field instead of copying it, because it is
1199 aliased with TREE_CHAIN. */
1200 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1201
1202 TYPE_POINTER_TO (new) = 0;
1203 TYPE_REFERENCE_TO (new) = 0;
1204 TYPE_MAIN_VARIANT (new) = new;
1205 TYPE_NEXT_VARIANT (new) = 0;
1206
1207 return new;
1208 }
1209 \f
1210 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1211 TYPE_INDEX_TYPE is INDEX. */
1212
1213 tree
1214 create_index_type (tree min, tree max, tree index)
1215 {
1216 /* First build a type for the desired range. */
1217 tree type = build_index_2_type (min, max);
1218
1219 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1220 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1221 is set, but not to INDEX, make a copy of this type with the requested
1222 index type. Note that we have no way of sharing these types, but that's
1223 only a small hole. */
1224 if (TYPE_INDEX_TYPE (type) == index)
1225 return type;
1226 else if (TYPE_INDEX_TYPE (type) != 0)
1227 type = copy_type (type);
1228
1229 SET_TYPE_INDEX_TYPE (type, index);
1230 create_type_decl (NULL_TREE, type, NULL, 1, 0, Empty);
1231 return type;
1232 }
1233 \f
1234 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1235 string) and TYPE is a ..._TYPE node giving its data type.
1236 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1237 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1238 information about this type. GNAT_NODE is used for the position of
1239 the decl. */
1240
1241 tree
1242 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1243 int artificial_p, int debug_info_p, Node_Id gnat_node)
1244 {
1245 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1246 enum tree_code code = TREE_CODE (type);
1247
1248 DECL_ARTIFICIAL (type_decl) = artificial_p;
1249
1250 process_attributes (type_decl, attr_list);
1251
1252 /* Pass type declaration information to the debugger unless this is an
1253 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1254 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1255 a dummy type, which will be completed later, or a type for which
1256 debugging information was not requested. */
1257 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1258 || ! debug_info_p)
1259 DECL_IGNORED_P (type_decl) = 1;
1260 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1261 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1262 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1263 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1264
1265 if (!TYPE_IS_DUMMY_P (type))
1266 gnat_pushdecl (type_decl, gnat_node);
1267
1268 return type_decl;
1269 }
1270
1271 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1272 ASM_NAME is its assembler name (if provided). TYPE is its data type
1273 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1274 expression; NULL_TREE if none.
1275
1276 CONST_FLAG is nonzero if this variable is constant.
1277
1278 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1279 the current compilation unit. This flag should be set when processing the
1280 variable definitions in a package specification. EXTERN_FLAG is nonzero
1281 when processing an external variable declaration (as opposed to a
1282 definition: no storage is to be allocated for the variable here).
1283
1284 STATIC_FLAG is only relevant when not at top level. In that case
1285 it indicates whether to always allocate storage to the variable.
1286
1287 GNAT_NODE is used for the position of the decl. */
1288
1289 tree
1290 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1291 int const_flag, int public_flag, int extern_flag,
1292 int static_flag, struct attrib *attr_list, Node_Id gnat_node)
1293 {
1294 int init_const
1295 = (var_init == 0
1296 ? 0
1297 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1298 && (global_bindings_p () || static_flag
1299 ? 0 != initializer_constant_valid_p (var_init,
1300 TREE_TYPE (var_init))
1301 : TREE_CONSTANT (var_init))));
1302 tree var_decl
1303 = build_decl ((const_flag && init_const
1304 /* Only make a CONST_DECL for sufficiently-small objects.
 1305                       We consider complex double "sufficiently-small".  */
1306 && TYPE_SIZE (type) != 0
1307 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1308 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1309 GET_MODE_SIZE (DCmode)))
1310 ? CONST_DECL : VAR_DECL, var_name, type);
1311
1312 /* If this is external, throw away any initializations unless this is a
1313 CONST_DECL (meaning we have a constant); they will be done elsewhere.
1314 If we are defining a global here, leave a constant initialization and
1315 save any variable elaborations for the elaboration routine. If we are
1316 just annotating types, throw away the initialization if it isn't a
1317 constant. */
1318 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1319 || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
1320 var_init = 0;
1321
1322 DECL_INITIAL (var_decl) = var_init;
1323 TREE_READONLY (var_decl) = const_flag;
1324 DECL_EXTERNAL (var_decl) = extern_flag;
1325 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1326 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1327 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1328 = TYPE_VOLATILE (type);
1329
1330 /* At the global binding level we need to allocate static storage for the
 1331      variable if and only if it's not external.  If we are not at the top level
1332 we allocate automatic storage unless requested not to. */
1333 TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;
1334
1335 if (asm_name != 0)
1336 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1337
1338 process_attributes (var_decl, attr_list);
1339
1340 /* Add this decl to the current binding level. */
1341 gnat_pushdecl (var_decl, gnat_node);
1342
1343 if (TREE_SIDE_EFFECTS (var_decl))
1344 TREE_ADDRESSABLE (var_decl) = 1;
1345
1346 if (TREE_CODE (var_decl) != CONST_DECL)
1347 rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);
1348
1349 return var_decl;
1350 }
1351 \f
1352 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1353 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1354 this field is in a record type with a "pragma pack". If SIZE is nonzero
1355 it is the specified size for this field. If POS is nonzero, it is the bit
1356 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1357 the address of this field for aliasing purposes. */
1358
1359 tree
1360 create_field_decl (tree field_name, tree field_type, tree record_type,
1361 int packed, tree size, tree pos, int addressable)
1362 {
1363 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1364
1365 DECL_CONTEXT (field_decl) = record_type;
1366 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1367
1368 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1369 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. */
1370 if (packed && TYPE_MODE (field_type) == BLKmode)
1371 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1372
1373 /* If a size is specified, use it. Otherwise, if the record type is packed
1374 compute a size to use, which may differ from the object's natural size.
1375 We always set a size in this case to trigger the checks for bitfield
1376 creation below, which is typically required when no position has been
1377 specified. */
1378 if (size != 0)
1379 size = convert (bitsizetype, size);
1380 else if (packed == 1)
1381 {
1382 size = rm_size (field_type);
1383
1384 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1385 byte. */
1386 if (TREE_CODE (size) == INTEGER_CST
1387 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1388 size = round_up (size, BITS_PER_UNIT);
1389 }
1390
 1391   /* Make a bitfield if a size is specified, for two reasons: first, if the size
 1392      differs from the natural size; second, if the alignment is insufficient.
1393 There are a number of ways the latter can be true.
1394
1395 We never make a bitfield if the type of the field has a nonconstant size,
1396 or if it is claimed to be addressable, because no such entity requiring
1397 bitfield operations should reach here.
1398
1399 We do *preventively* make a bitfield when there might be the need for it
1400 but we don't have all the necessary information to decide, as is the case
1401 of a field with no specified position in a packed record.
1402
1403 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1404 in layout_decl or finish_record_type to clear the bit_field indication if
1405 it is in fact not needed. */
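  /* Illustration (hypothetical case): a component of a 32-bit integer type
     with a Size clause of 8 bits, or a sized component of a packed record,
     gets DECL_BIT_FIELD set here; finish_record_type may later clear it if
     layout shows the field to be byte-aligned and of natural size.  */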
1406 if (size != 0 && TREE_CODE (size) == INTEGER_CST
1407 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1408 && ! addressable
1409 && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
1410 || (pos != 0
1411 && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
1412 bitsize_int (TYPE_ALIGN
1413 (field_type)))))
1414 || packed
1415 || (TYPE_ALIGN (record_type) != 0
1416 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1417 {
1418 DECL_BIT_FIELD (field_decl) = 1;
1419 DECL_SIZE (field_decl) = size;
1420 if (! packed && pos == 0)
1421 DECL_ALIGN (field_decl)
1422 = (TYPE_ALIGN (record_type) != 0
1423 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1424 : TYPE_ALIGN (field_type));
1425 }
1426
1427 DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
1428 DECL_ALIGN (field_decl)
1429 = MAX (DECL_ALIGN (field_decl),
1430 DECL_BIT_FIELD (field_decl) ? 1
1431 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1432 : TYPE_ALIGN (field_type));
1433
1434 if (pos != 0)
1435 {
1436 /* We need to pass in the alignment the DECL is known to have.
1437 This is the lowest-order bit set in POS, but no more than
1438 the alignment of the record, if one is specified. Note
1439 that an alignment of 0 is taken as infinite. */
1440 unsigned int known_align;
1441
1442 if (host_integerp (pos, 1))
1443 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1444 else
1445 known_align = BITS_PER_UNIT;
1446
1447 if (TYPE_ALIGN (record_type)
1448 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1449 known_align = TYPE_ALIGN (record_type);
1450
1451 layout_decl (field_decl, known_align);
1452 SET_DECL_OFFSET_ALIGN (field_decl,
1453 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1454 : BITS_PER_UNIT);
1455 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1456 &DECL_FIELD_BIT_OFFSET (field_decl),
1457 DECL_OFFSET_ALIGN (field_decl), pos);
1458
1459 DECL_HAS_REP_P (field_decl) = 1;
1460 }
1461
1462 /* If the field type is passed by reference, we will have pointers to the
1463 field, so it is addressable. */
1464 if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1465 addressable = 1;
1466
1467 /* ??? For now, we say that any field of aggregate type is addressable
1468 because the front end may take 'Reference of it. */
1469 if (AGGREGATE_TYPE_P (field_type))
1470 addressable = 1;
1471
1472 /* Mark the decl as nonaddressable if it is indicated so semantically,
1473 meaning we won't ever attempt to take the address of the field.
1474
1475 It may also be "technically" nonaddressable, meaning that even if we
1476 attempt to take the field's address we will actually get the address of a
1477 copy. This is the case for true bitfields, but the DECL_BIT_FIELD value
1478 we have at this point is not accurate enough, so we don't account for
1479 this here and let finish_record_type decide. */
1480 DECL_NONADDRESSABLE_P (field_decl) = ! addressable;
1481
1482 return field_decl;
1483 }
1484
1485 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1486 effects, has the value of zero. */
1487
1488 static bool
1489 value_zerop (tree exp)
1490 {
1491 if (TREE_CODE (exp) == COMPOUND_EXPR)
1492 return value_zerop (TREE_OPERAND (exp, 1));
1493
1494 return integer_zerop (exp);
1495 }
1496 \f
1497 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1498 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1499 readonly (either an IN parameter or an address of a pass-by-ref
1500 parameter). */
1501
1502 tree
1503 create_param_decl (tree param_name, tree param_type, int readonly)
1504 {
1505 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1506
1507 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1508 lead to various ABI violations. */
1509 if (targetm.calls.promote_prototypes (param_type)
1510 && (TREE_CODE (param_type) == INTEGER_TYPE
1511 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1512 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1513 {
1514 /* We have to be careful about biased types here. Make a subtype
1515 of integer_type_node with the proper biasing. */
1516 if (TREE_CODE (param_type) == INTEGER_TYPE
1517 && TYPE_BIASED_REPRESENTATION_P (param_type))
1518 {
1519 param_type
1520 = copy_type (build_range_type (integer_type_node,
1521 TYPE_MIN_VALUE (param_type),
1522 TYPE_MAX_VALUE (param_type)));
1523
1524 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1525 }
1526 else
1527 param_type = integer_type_node;
1528 }
1529
1530 DECL_ARG_TYPE (param_decl) = param_type;
1531 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1532 TREE_READONLY (param_decl) = readonly;
1533 return param_decl;
1534 }
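/* Illustrative sketch: building a read-only IN parameter of type
   integer_type_node for a subprogram being translated.  The identifier
   used below is hypothetical.  */
#if 0
{
  tree gnu_param
    = create_param_decl (get_identifier ("N"), integer_type_node, 1);
}
#endif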
1535 \f
1536 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1537
1538 void
1539 process_attributes (tree decl, struct attrib *attr_list)
1540 {
1541 for (; attr_list; attr_list = attr_list->next)
1542 switch (attr_list->type)
1543 {
1544 case ATTR_MACHINE_ATTRIBUTE:
1545 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1546 NULL_TREE),
1547 ATTR_FLAG_TYPE_IN_PLACE);
1548 break;
1549
1550 case ATTR_LINK_ALIAS:
1551 TREE_STATIC (decl) = 1;
1552 assemble_alias (decl, attr_list->name);
1553 break;
1554
1555 case ATTR_WEAK_EXTERNAL:
1556 if (SUPPORTS_WEAK)
1557 declare_weak (decl);
1558 else
1559 post_error ("?weak declarations not supported on this target",
1560 attr_list->error_point);
1561 break;
1562
1563 case ATTR_LINK_SECTION:
1564 if (targetm.have_named_sections)
1565 {
1566 DECL_SECTION_NAME (decl)
1567 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1568 IDENTIFIER_POINTER (attr_list->name));
1569 }
1570 else
1571 post_error ("?section attributes are not supported for this target",
1572 attr_list->error_point);
1573 break;
1574 }
1575 }
1576 \f
1577 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1578 a power of 2. */
1579
1580 static bool
1581 value_factor_p (tree value, HOST_WIDE_INT factor)
1582 {
1583 if (host_integerp (value, 1))
1584 return tree_low_cst (value, 1) % factor == 0;
1585
1586 if (TREE_CODE (value) == MULT_EXPR)
1587 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1588 || value_factor_p (TREE_OPERAND (value, 1), factor));
1589
1590 return 0;
1591 }
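/* For example, value_factor_p (bitsize_int (24), 8) is true since 24 is a
   multiple of 8, and for a MULT_EXPR it is enough that either operand be a
   known multiple of FACTOR.  */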
1592
1593 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
1594 unless we can prove these two fields are laid out in such a way that no gap
1595 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1596 is the distance in bits between the end of PREV_FIELD and the starting
1597 position of CURR_FIELD. It is ignored if null. */
1598
1599 static bool
1600 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1601 {
1602 /* If this is the first field of the record, there cannot be any gap. */
1603 if (!prev_field)
1604 return 0;
1605
1606 /* If the previous field is a union type, then return False: The only
1607 time when such a field is not the last field of the record is when
1608 there are other components at fixed positions after it (meaning there
1609 was a rep clause for every field), in which case we don't want the
1610 alignment constraint to override them. */
1611 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1612 return 0;
1613
1614 /* If the distance between the end of prev_field and the beginning of
1615 curr_field is constant, then there is a gap if the value of this
1616 constant is nonzero. */
1617 if (offset && host_integerp (offset, 1))
1618 return (!integer_zerop (offset));
1619
1620 /* If the size and position of the previous field are constant,
1621 then check the sum of this size and position. There will be a gap
1622 iff it is not a multiple of the current field alignment. */
1623 if (host_integerp (DECL_SIZE (prev_field), 1)
1624 && host_integerp (bit_position (prev_field), 1))
1625 return ((tree_low_cst (bit_position (prev_field), 1)
1626 + tree_low_cst (DECL_SIZE (prev_field), 1))
1627 % DECL_ALIGN (curr_field) != 0);
1628
1629 /* If both the position and size of the previous field are multiples
1630 of the current field alignment, there cannot be any gap. */
1631 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1632 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1633 return 0;
1634
1635 /* Otherwise fall back to reporting a potential gap. */
1636 return 1;
1637 }
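/* Worked example: if PREV_FIELD sits at bit position 0 with a DECL_SIZE of
   24 bits and CURR_FIELD has a DECL_ALIGN of 16, then (0 + 24) % 16 != 0,
   so a potential gap is reported; with a DECL_SIZE of 32 bits instead,
   (0 + 32) % 16 == 0 and no gap is reported.  */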
1638
1639 /* Returns a LABEL_DECL node for LABEL_NAME. */
1640
1641 tree
1642 create_label_decl (tree label_name)
1643 {
1644 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1645
1646 DECL_CONTEXT (label_decl) = current_function_decl;
1647 DECL_MODE (label_decl) = VOIDmode;
1648 DECL_SOURCE_LOCATION (label_decl) = input_location;
1649
1650 return label_decl;
1651 }
1652 \f
1653 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1654 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1655 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1656 PARM_DECL nodes chained through the TREE_CHAIN field).
1657
1658 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1659 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1660
1661 tree
1662 create_subprog_decl (tree subprog_name, tree asm_name,
1663 tree subprog_type, tree param_decl_list, int inline_flag,
1664 int public_flag, int extern_flag,
1665 struct attrib *attr_list, Node_Id gnat_node)
1666 {
1667 tree return_type = TREE_TYPE (subprog_type);
1668 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1669
1670 /* If this is a function nested inside an inlined external function, it
1671 means we aren't going to compile the outer function unless it is
1672 actually inlined, so do the same for us. */
1673 if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
1674 && DECL_EXTERNAL (current_function_decl))
1675 extern_flag = 1;
1676
1677 DECL_EXTERNAL (subprog_decl) = extern_flag;
1678 TREE_PUBLIC (subprog_decl) = public_flag;
1679 TREE_STATIC (subprog_decl) = 1;
1680 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1681 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1682 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1683 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1684 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1685
1686 if (inline_flag)
1687 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1688
1689 if (asm_name != 0)
1690 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1691
1692 process_attributes (subprog_decl, attr_list);
1693
1694 /* Add this decl to the current binding level. */
1695 gnat_pushdecl (subprog_decl, gnat_node);
1696
1697 /* Output the assembler code and/or RTL for the declaration. */
1698 rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);
1699
1700 return subprog_decl;
1701 }
1702 \f
1703 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1704 body. This routine needs to be invoked before processing the declarations
1705 appearing in the subprogram. */
1706
1707 void
1708 begin_subprog_body (tree subprog_decl)
1709 {
1710 tree param_decl;
1711
1712 current_function_decl = subprog_decl;
1713 announce_function (subprog_decl);
1714
1715 /* Enter a new binding level and show that all the parameters belong to
1716 this function. */
1717 gnat_pushlevel ();
1718 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1719 param_decl = TREE_CHAIN (param_decl))
1720 DECL_CONTEXT (param_decl) = subprog_decl;
1721
1722 make_decl_rtl (subprog_decl, NULL);
1723
1724 /* We handle pending sizes via the elaboration of types, so we don't need to
1725 save them. This causes them to be marked as part of the outer function
1726 and then discarded. */
1727 get_pending_sizes ();
1728 }
1729
1730 /* Finish the definition of the current subprogram and compile it all the way
1731 to assembler language output. BODY is the tree corresponding to
1732 the subprogram. */
1733
1734 void
1735 end_subprog_body (tree body)
1736 {
1737 tree fndecl = current_function_decl;
1738
1739 /* Mark the BLOCK for this level as being for this function and pop the
1740 level. Since the vars in it are the parameters, clear them. */
1741 BLOCK_VARS (current_binding_level->block) = 0;
1742 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1743 DECL_INITIAL (fndecl) = current_binding_level->block;
1744 gnat_poplevel ();
1745
1746 /* Deal with inline. If declared inline or we should default to inline,
1747 set the flag in the decl. */
1748 DECL_INLINE (fndecl)
1749 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
1750
1751 /* We handle pending sizes via the elaboration of types, so we don't
1752 need to save them. */
1753 get_pending_sizes ();
1754
1755 /* Mark the RESULT_DECL as being in this subprogram. */
1756 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1757
1758 DECL_SAVED_TREE (fndecl) = body;
1759
1760 current_function_decl = DECL_CONTEXT (fndecl);
1761 cfun = NULL;
1762
1763 /* If we're only annotating types, don't actually compile this function. */
1764 if (type_annotate_only)
1765 return;
1766
1767 /* We do different things for nested and non-nested functions.
1768 ??? This should be in cgraph. */
1769 if (!DECL_CONTEXT (fndecl))
1770 {
1771 gnat_gimplify_function (fndecl);
1772 lower_nested_functions (fndecl);
1773 gnat_finalize (fndecl);
1774 }
1775 else
1776 /* Register this function with cgraph just far enough to get it
1777 added to our parent's nested function list. */
1778 (void) cgraph_node (fndecl);
1779 }
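/* Illustrative sketch of the expected calling sequence for the subprogram
   routines above; the variable names are hypothetical:

     gnu_subprog = create_subprog_decl (gnu_name, NULL_TREE, gnu_type,
                                        gnu_param_list, 0, 1, 0,
                                        NULL, gnat_node);
     begin_subprog_body (gnu_subprog);
     ... translate the declarations and statements into GNU_BODY ...
     end_subprog_body (gnu_body);  */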
1780
1781 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
1782
1783 static void
1784 gnat_gimplify_function (tree fndecl)
1785 {
1786 struct cgraph_node *cgn;
1787
1788 dump_function (TDI_original, fndecl);
1789 gimplify_function_tree (fndecl);
1790 dump_function (TDI_generic, fndecl);
1791
1792 /* Convert all nested functions to GIMPLE now. We do things in this order
1793 so that items like VLA sizes are expanded properly in the context of the
1794 correct function. */
1795 cgn = cgraph_node (fndecl);
1796 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1797 gnat_gimplify_function (cgn->decl);
1798 }
1799
1800 /* Give FNDECL and all its nested functions to cgraph for compilation. */
1801
1802 static void
1803 gnat_finalize (tree fndecl)
1804 {
1805 struct cgraph_node *cgn;
1806
1807 /* Finalize all nested functions now. */
1808 cgn = cgraph_node (fndecl);
1809 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
1810 gnat_finalize (cgn->decl);
1811
1812 cgraph_finalize_function (fndecl, false);
1813 }
1814 \f
1815 /* Return a definition for a builtin function named NAME and whose data type
1816 is TYPE. TYPE should be a function type with argument types.
1817 FUNCTION_CODE tells later passes how to compile calls to this function.
1818 See tree.h for its possible values.
1819
1820 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1821 the name to be called if we can't opencode the function. If
1822 ATTRS is nonzero, use that for the function attribute list. */
1823
1824 tree
1825 builtin_function (const char *name, tree type, int function_code,
1826 enum built_in_class class, const char *library_name,
1827 tree attrs)
1828 {
1829 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1830
1831 DECL_EXTERNAL (decl) = 1;
1832 TREE_PUBLIC (decl) = 1;
1833 if (library_name)
1834 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1835
1836 gnat_pushdecl (decl, Empty);
1837 DECL_BUILT_IN_CLASS (decl) = class;
1838 DECL_FUNCTION_CODE (decl) = function_code;
1839 if (attrs)
1840 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1841 return decl;
1842 }
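/* Illustrative sketch: registering a library routine as a builtin.  The
   name, prototype and function code below are hypothetical placeholders.  */
#if 0
{
  tree ftype = build_function_type (ptr_type_node, NULL_TREE);

  builtin_function ("__gnat_example", ftype, 0, NOT_BUILT_IN,
                    "gnat_example", NULL_TREE);
}
#endif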
1843
1844 /* Return an integer type with the number of bits of precision given by
1845 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1846 it is a signed type. */
1847
1848 tree
1849 gnat_type_for_size (unsigned precision, int unsignedp)
1850 {
1851 tree t;
1852 char type_name[20];
1853
1854 if (precision <= 2 * MAX_BITS_PER_WORD
1855 && signed_and_unsigned_types[precision][unsignedp] != 0)
1856 return signed_and_unsigned_types[precision][unsignedp];
1857
1858 if (unsignedp)
1859 t = make_unsigned_type (precision);
1860 else
1861 t = make_signed_type (precision);
1862
1863 if (precision <= 2 * MAX_BITS_PER_WORD)
1864 signed_and_unsigned_types[precision][unsignedp] = t;
1865
1866 if (TYPE_NAME (t) == 0)
1867 {
1868 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1869 TYPE_NAME (t) = get_identifier (type_name);
1870 }
1871
1872 return t;
1873 }
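/* For instance, gnat_type_for_size (8, 1) yields an 8-bit unsigned type
   named "UNSIGNED_8" and caches it in signed_and_unsigned_types, so later
   requests for the same precision and signedness return the same node.  */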
1874
1875 /* Likewise for floating-point types. */
1876
1877 static tree
1878 float_type_for_precision (int precision, enum machine_mode mode)
1879 {
1880 tree t;
1881 char type_name[20];
1882
1883 if (float_types[(int) mode] != 0)
1884 return float_types[(int) mode];
1885
1886 float_types[(int) mode] = t = make_node (REAL_TYPE);
1887 TYPE_PRECISION (t) = precision;
1888 layout_type (t);
1889
1890 if (TYPE_MODE (t) != mode)
1891 gigi_abort (414);
1892
1893 if (TYPE_NAME (t) == 0)
1894 {
1895 sprintf (type_name, "FLOAT_%d", precision);
1896 TYPE_NAME (t) = get_identifier (type_name);
1897 }
1898
1899 return t;
1900 }
1901
1902 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1903 an unsigned type; otherwise a signed type is returned. */
1904
1905 tree
1906 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1907 {
1908 if (mode == BLKmode)
1909 return NULL_TREE;
1910 else if (mode == VOIDmode)
1911 return void_type_node;
1912 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1913 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
1914 else
1915 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1916 }
1917
1918 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1919
1920 tree
1921 gnat_unsigned_type (tree type_node)
1922 {
1923 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1924
1925 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1926 {
1927 type = copy_node (type);
1928 TREE_TYPE (type) = type_node;
1929 }
1930 else if (TREE_TYPE (type_node) != 0
1931 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1932 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1933 {
1934 type = copy_node (type);
1935 TREE_TYPE (type) = TREE_TYPE (type_node);
1936 }
1937
1938 return type;
1939 }
1940
1941 /* Return the signed version of a TYPE_NODE, a scalar type. */
1942
1943 tree
1944 gnat_signed_type (tree type_node)
1945 {
1946 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
1947
1948 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1949 {
1950 type = copy_node (type);
1951 TREE_TYPE (type) = type_node;
1952 }
1953 else if (TREE_TYPE (type_node) != 0
1954 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1955 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1956 {
1957 type = copy_node (type);
1958 TREE_TYPE (type) = TREE_TYPE (type_node);
1959 }
1960
1961 return type;
1962 }
1963
1964 /* Return a type the same as TYPE except unsigned or signed according to
1965 UNSIGNEDP. */
1966
1967 tree
1968 gnat_signed_or_unsigned_type (int unsignedp, tree type)
1969 {
1970 if (! INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
1971 return type;
1972 else
1973 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
1974 }
1975 \f
1976 /* EXP is an expression for the size of an object. If this size contains
1977 discriminant references, replace them with the maximum (if MAX_P) or
1978 minimum (if ! MAX_P) possible value of the discriminant. */
1979
1980 tree
1981 max_size (tree exp, int max_p)
1982 {
1983 enum tree_code code = TREE_CODE (exp);
1984 tree type = TREE_TYPE (exp);
1985
1986 switch (TREE_CODE_CLASS (code))
1987 {
1988 case 'd':
1989 case 'c':
1990 return exp;
1991
1992 case 'x':
1993 if (code == TREE_LIST)
1994 return tree_cons (TREE_PURPOSE (exp),
1995 max_size (TREE_VALUE (exp), max_p),
1996 TREE_CHAIN (exp) != 0
1997 ? max_size (TREE_CHAIN (exp), max_p) : 0);
1998 break;
1999
2000 case 'r':
2001 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2002 modify. Otherwise, we treat it like a variable. */
2003 if (! CONTAINS_PLACEHOLDER_P (exp))
2004 return exp;
2005
2006 type = TREE_TYPE (TREE_OPERAND (exp, 1));
2007 return
2008 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);
2009
2010 case '<':
2011 return max_p ? size_one_node : size_zero_node;
2012
2013 case '1':
2014 case '2':
2015 case 'e':
2016 switch (TREE_CODE_LENGTH (code))
2017 {
2018 case 1:
2019 if (code == NON_LVALUE_EXPR)
2020 return max_size (TREE_OPERAND (exp, 0), max_p);
2021 else
2022 return
2023 fold (build1 (code, type,
2024 max_size (TREE_OPERAND (exp, 0),
2025 code == NEGATE_EXPR ? ! max_p : max_p)));
2026
2027 case 2:
2028 if (code == RTL_EXPR)
2029 gigi_abort (407);
2030 else if (code == COMPOUND_EXPR)
2031 return max_size (TREE_OPERAND (exp, 1), max_p);
2032
2033 {
2034 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
2035 tree rhs = max_size (TREE_OPERAND (exp, 1),
2036 code == MINUS_EXPR ? ! max_p : max_p);
2037
2038 /* Special-case wanting the maximum value of a MIN_EXPR.
2039 In that case, if one side overflows, return the other.
2040 sizetype is signed, but we know sizes are non-negative.
2041 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2042 overflowing or the maximum possible value and the RHS
2043 a variable. */
2044 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
2045 return lhs;
2046 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
2047 return rhs;
2048 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
2049 && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
2050 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
2051 && ! TREE_CONSTANT (rhs))
2052 return lhs;
2053 else
2054 return fold (build (code, type, lhs, rhs));
2055 }
2056
2057 case 3:
2058 if (code == SAVE_EXPR)
2059 return exp;
2060 else if (code == COND_EXPR)
2061 return fold (build (max_p ? MAX_EXPR : MIN_EXPR, type,
2062 max_size (TREE_OPERAND (exp, 1), max_p),
2063 max_size (TREE_OPERAND (exp, 2), max_p)));
2064 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
2065 return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
2066 max_size (TREE_OPERAND (exp, 1), max_p), NULL);
2067 }
2068 }
2069
2070 gigi_abort (408);
2071 }
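/* Worked example: given a size expression such as MIN_EXPR <16, D * 8>
   where D references a discriminant, calling max_size with MAX_P set
   replaces D by the upper bound of its type; if the resulting constant
   overflows sizetype, the MIN_EXPR case above returns the other operand,
   16, instead of the overflowed value.  */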
2072 \f
2073 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2074 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2075 Return a constructor for the template. */
2076
2077 tree
2078 build_template (tree template_type, tree array_type, tree expr)
2079 {
2080 tree template_elts = NULL_TREE;
2081 tree bound_list = NULL_TREE;
2082 tree field;
2083
2084 if (TREE_CODE (array_type) == RECORD_TYPE
2085 && (TYPE_IS_PADDING_P (array_type)
2086 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
2087 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2088
2089 if (TREE_CODE (array_type) == ARRAY_TYPE
2090 || (TREE_CODE (array_type) == INTEGER_TYPE
2091 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2092 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2093
2094 /* First make the list for a CONSTRUCTOR for the template. Go down the
2095 field list of the template instead of the type chain because this
2096 array might be an Ada array of arrays and we can't tell where the
2097 nested arrays stop being the underlying object. */
2098
2099 for (field = TYPE_FIELDS (template_type); field;
2100 (bound_list != 0
2101 ? (bound_list = TREE_CHAIN (bound_list))
2102 : (array_type = TREE_TYPE (array_type))),
2103 field = TREE_CHAIN (TREE_CHAIN (field)))
2104 {
2105 tree bounds, min, max;
2106
2107 /* If we have a bound list, get the bounds from there. Likewise
2108 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2109 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2110 This will give us a maximum range. */
2111 if (bound_list != 0)
2112 bounds = TREE_VALUE (bound_list);
2113 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2114 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2115 else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
2116 && DECL_BY_COMPONENT_PTR_P (expr))
2117 bounds = TREE_TYPE (field);
2118 else
2119 gigi_abort (411);
2120
2121 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2122 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2123
2124 /* If either MIN or MAX involves a PLACEHOLDER_EXPR, we must
2125 substitute it from EXPR. */
2126 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2127 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2128
2129 template_elts = tree_cons (TREE_CHAIN (field), max,
2130 tree_cons (field, min, template_elts));
2131 }
2132
2133 return gnat_build_constructor (template_type, nreverse (template_elts));
2134 }
2135 \f
2136 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2137 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2138 in the type contains in its DECL_INITIAL the expression to use when
2139 a constructor is made for the type. GNAT_ENTITY is an entity used
2140 to print out an error message if the mechanism cannot be applied to
2141 an object of that type and also for the name. */
2142
2143 tree
2144 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2145 {
2146 tree record_type = make_node (RECORD_TYPE);
2147 tree field_list = 0;
2148 int class;
2149 int dtype = 0;
2150 tree inner_type;
2151 int ndim;
2152 int i;
2153 tree *idx_arr;
2154 tree tem;
2155
2156 /* If TYPE is an unconstrained array, use the underlying array type. */
2157 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2158 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2159
2160 /* If this is an array, compute the number of dimensions in the array,
2161 get the index types, and point to the inner type. */
2162 if (TREE_CODE (type) != ARRAY_TYPE)
2163 ndim = 0;
2164 else
2165 for (ndim = 1, inner_type = type;
2166 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2167 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2168 ndim++, inner_type = TREE_TYPE (inner_type))
2169 ;
2170
2171 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2172
2173 if (mech != By_Descriptor_NCA
2174 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2175 for (i = ndim - 1, inner_type = type;
2176 i >= 0;
2177 i--, inner_type = TREE_TYPE (inner_type))
2178 idx_arr[i] = TYPE_DOMAIN (inner_type);
2179 else
2180 for (i = 0, inner_type = type;
2181 i < ndim;
2182 i++, inner_type = TREE_TYPE (inner_type))
2183 idx_arr[i] = TYPE_DOMAIN (inner_type);
2184
2185 /* Now get the DTYPE value. */
2186 switch (TREE_CODE (type))
2187 {
2188 case INTEGER_TYPE:
2189 case ENUMERAL_TYPE:
2190 if (TYPE_VAX_FLOATING_POINT_P (type))
2191 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2192 {
2193 case 6:
2194 dtype = 10;
2195 break;
2196 case 9:
2197 dtype = 11;
2198 break;
2199 case 15:
2200 dtype = 27;
2201 break;
2202 }
2203 else
2204 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2205 {
2206 case 8:
2207 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2208 break;
2209 case 16:
2210 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2211 break;
2212 case 32:
2213 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2214 break;
2215 case 64:
2216 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2217 break;
2218 case 128:
2219 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2220 break;
2221 }
2222 break;
2223
2224 case REAL_TYPE:
2225 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2226 break;
2227
2228 case COMPLEX_TYPE:
2229 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2230 && TYPE_VAX_FLOATING_POINT_P (type))
2231 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2232 {
2233 case 6:
2234 dtype = 12;
2235 break;
2236 case 9:
2237 dtype = 13;
2238 break;
2239 case 15:
2240 dtype = 29;
2241 }
2242 else
2243 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2244 break;
2245
2246 case ARRAY_TYPE:
2247 dtype = 14;
2248 break;
2249
2250 default:
2251 break;
2252 }
2253
2254 /* Get the CLASS value. */
2255 switch (mech)
2256 {
2257 case By_Descriptor_A:
2258 class = 4;
2259 break;
2260 case By_Descriptor_NCA:
2261 class = 10;
2262 break;
2263 case By_Descriptor_SB:
2264 class = 15;
2265 break;
2266 default:
2267 class = 1;
2268 }
2269
2270 /* Make the type for a descriptor for VMS. The first four fields
2271 are the same for all types. */
2272
2273 field_list
2274 = chainon (field_list,
2275 make_descriptor_field
2276 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2277 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2278
2279 field_list = chainon (field_list,
2280 make_descriptor_field ("DTYPE",
2281 gnat_type_for_size (8, 1),
2282 record_type, size_int (dtype)));
2283 field_list = chainon (field_list,
2284 make_descriptor_field ("CLASS",
2285 gnat_type_for_size (8, 1),
2286 record_type, size_int (class)));
2287
2288 field_list
2289 = chainon (field_list,
2290 make_descriptor_field
2291 ("POINTER",
2292 build_pointer_type_for_mode (type, SImode, false), record_type,
2293 build1 (ADDR_EXPR,
2294 build_pointer_type_for_mode (type, SImode, false),
2295 build (PLACEHOLDER_EXPR, type))));
2296
2297 switch (mech)
2298 {
2299 case By_Descriptor:
2300 case By_Descriptor_S:
2301 break;
2302
2303 case By_Descriptor_SB:
2304 field_list
2305 = chainon (field_list,
2306 make_descriptor_field
2307 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2308 TREE_CODE (type) == ARRAY_TYPE
2309 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2310 field_list
2311 = chainon (field_list,
2312 make_descriptor_field
2313 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2314 TREE_CODE (type) == ARRAY_TYPE
2315 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2316 break;
2317
2318 case By_Descriptor_A:
2319 case By_Descriptor_NCA:
2320 field_list = chainon (field_list,
2321 make_descriptor_field ("SCALE",
2322 gnat_type_for_size (8, 1),
2323 record_type,
2324 size_zero_node));
2325
2326 field_list = chainon (field_list,
2327 make_descriptor_field ("DIGITS",
2328 gnat_type_for_size (8, 1),
2329 record_type,
2330 size_zero_node));
2331
2332 field_list
2333 = chainon (field_list,
2334 make_descriptor_field
2335 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2336 size_int (mech == By_Descriptor_NCA
2337 ? 0
2338 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2339 : (TREE_CODE (type) == ARRAY_TYPE
2340 && TYPE_CONVENTION_FORTRAN_P (type)
2341 ? 224 : 192))));
2342
2343 field_list = chainon (field_list,
2344 make_descriptor_field ("DIMCT",
2345 gnat_type_for_size (8, 1),
2346 record_type,
2347 size_int (ndim)));
2348
2349 field_list = chainon (field_list,
2350 make_descriptor_field ("ARSIZE",
2351 gnat_type_for_size (32, 1),
2352 record_type,
2353 size_in_bytes (type)));
2354
2355 /* Now build a pointer to the 0,0,0... element. */
2356 tem = build (PLACEHOLDER_EXPR, type);
2357 for (i = 0, inner_type = type; i < ndim;
2358 i++, inner_type = TREE_TYPE (inner_type))
2359 tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
2360 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2361 NULL_TREE, NULL_TREE);
2362
2363 field_list
2364 = chainon (field_list,
2365 make_descriptor_field
2366 ("A0",
2367 build_pointer_type_for_mode (inner_type, SImode, false),
2368 record_type,
2369 build1 (ADDR_EXPR,
2370 build_pointer_type_for_mode (inner_type, SImode,
2371 false),
2372 tem)));
2373
2374 /* Next come the addressing coefficients. */
2375 tem = size_int (1);
2376 for (i = 0; i < ndim; i++)
2377 {
2378 char fname[3];
2379 tree idx_length
2380 = size_binop (MULT_EXPR, tem,
2381 size_binop (PLUS_EXPR,
2382 size_binop (MINUS_EXPR,
2383 TYPE_MAX_VALUE (idx_arr[i]),
2384 TYPE_MIN_VALUE (idx_arr[i])),
2385 size_int (1)));
2386
2387 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2388 fname[1] = '0' + i, fname[2] = 0;
2389 field_list
2390 = chainon (field_list,
2391 make_descriptor_field (fname,
2392 gnat_type_for_size (32, 1),
2393 record_type, idx_length));
2394
2395 if (mech == By_Descriptor_NCA)
2396 tem = idx_length;
2397 }
2398
2399 /* Finally here are the bounds. */
2400 for (i = 0; i < ndim; i++)
2401 {
2402 char fname[3];
2403
2404 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2405 field_list
2406 = chainon (field_list,
2407 make_descriptor_field
2408 (fname, gnat_type_for_size (32, 1), record_type,
2409 TYPE_MIN_VALUE (idx_arr[i])));
2410
2411 fname[0] = 'U';
2412 field_list
2413 = chainon (field_list,
2414 make_descriptor_field
2415 (fname, gnat_type_for_size (32, 1), record_type,
2416 TYPE_MAX_VALUE (idx_arr[i])));
2417 }
2418 break;
2419
2420 default:
2421 post_error ("unsupported descriptor type for &", gnat_entity);
2422 }
2423
2424 finish_record_type (record_type, field_list, 0, 1);
2425 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2426 NULL, 1, 0, gnat_entity);
2427
2428 return record_type;
2429 }
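/* For example, a By_Descriptor_S mechanism applied to a 32-bit signed
   integer yields DTYPE 8 and CLASS 1, i.e. the standard fixed-length scalar
   descriptor, with LENGTH set to the size in bytes and POINTER designating
   the object itself.  */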
2430
2431 /* Utility routine for above code to make a field. */
2432
2433 static tree
2434 make_descriptor_field (const char *name, tree type,
2435 tree rec_type, tree initial)
2436 {
2437 tree field
2438 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2439
2440 DECL_INITIAL (field) = initial;
2441 return field;
2442 }
2443 \f
2444 /* Build a type to be used to represent an aliased object whose nominal
2445 type is an unconstrained array. This consists of a RECORD_TYPE containing
2446 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2447 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2448 is used to represent an arbitrary unconstrained object. Use NAME
2449 as the name of the record. */
2450
2451 tree
2452 build_unc_object_type (tree template_type, tree object_type, tree name)
2453 {
2454 tree type = make_node (RECORD_TYPE);
2455 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2456 template_type, type, 0, 0, 0, 1);
2457 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2458 type, 0, 0, 0, 1);
2459
2460 TYPE_NAME (type) = name;
2461 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2462 finish_record_type (type,
2463 chainon (chainon (NULL_TREE, template_field),
2464 array_field),
2465 0, 0);
2466
2467 return type;
2468 }
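/* The resulting layout is simply { BOUNDS : template_type;
   ARRAY : object_type } with TYPE_CONTAINS_TEMPLATE_P set, so the bounds
   are placed before the array data in the aliased object.  */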
2469 \f
2470 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2471 the normal case this is just two adjustments, but we have more to do
2472 if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
2473
2474 void
2475 update_pointer_to (tree old_type, tree new_type)
2476 {
2477 tree ptr = TYPE_POINTER_TO (old_type);
2478 tree ref = TYPE_REFERENCE_TO (old_type);
2479 tree ptr1, ref1;
2480 tree type;
2481
2482 /* If this is the main variant, process all the other variants first. */
2483 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2484 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2485 type = TYPE_NEXT_VARIANT (type))
2486 update_pointer_to (type, new_type);
2487
2488 /* If no pointer or reference, we are done. */
2489 if (ptr == 0 && ref == 0)
2490 return;
2491
2492 /* Merge the old type qualifiers in the new type.
2493
2494 Each old variant has qualifiers for specific reasons, and the new
2495 designated type as well. Each set of qualifiers represents useful
2496 information grabbed at some point, and merging the two simply unifies
2497 these inputs into the final type description.
2498
2499 Consider for instance a volatile type frozen after an access to constant
2500 type designating it. After the designated type freeze, we get here with a
2501 volatile new_type and a dummy old_type with a readonly variant, created
2502 when the access type was processed. We shall make a volatile and readonly
2503 designated type, because that's what it really is.
2504
2505 We might also get here for a non-dummy old_type variant with different
2506 qualifiers than the new_type ones, for instance in some cases of pointers
2507 to private record type elaboration (see the comments around the call to
2508 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
2509 qualifiers in those cases too, to avoid accidentally discarding the
2510 initial set, and will often end up with old_type == new_type then. */
2511 new_type = build_qualified_type (new_type,
2512 TYPE_QUALS (old_type)
2513 | TYPE_QUALS (new_type));
2514
2515 /* If the new type and the old one are identical, there is nothing to
2516 update. */
2517 if (old_type == new_type)
2518 return;
2519
2520 /* Otherwise, first handle the simple case. */
2521 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2522 {
2523 TYPE_POINTER_TO (new_type) = ptr;
2524 TYPE_REFERENCE_TO (new_type) = ref;
2525
2526 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
2527 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
2528 ptr1 = TYPE_NEXT_VARIANT (ptr1))
2529 {
2530 TREE_TYPE (ptr1) = new_type;
2531
2532 if (TYPE_NAME (ptr1) != 0
2533 && TREE_CODE (TYPE_NAME (ptr1)) == TYPE_DECL
2534 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2535 rest_of_decl_compilation (TYPE_NAME (ptr1), NULL,
2536 global_bindings_p (), 0);
2537 }
2538
2539 for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
2540 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
2541 ref1 = TYPE_NEXT_VARIANT (ref1))
2542 {
2543 TREE_TYPE (ref1) = new_type;
2544
2545 if (TYPE_NAME (ref1) != 0
2546 && TREE_CODE (TYPE_NAME (ref1)) == TYPE_DECL
2547 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2548 rest_of_decl_compilation (TYPE_NAME (ref1), NULL,
2549 global_bindings_p (), 0);
2550 }
2551 }
2552
2553 /* Now deal with the unconstrained array case. In this case the "pointer"
2554 is actually a RECORD_TYPE where the types of both fields are
2555 pointers to void. In that case, copy the field list from the
2556 old type to the new one and update the fields' context. */
2557 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2558 gigi_abort (412);
2559
2560 else
2561 {
2562 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2563 tree ptr_temp_type;
2564 tree new_ref;
2565 tree var;
2566
2567 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2568 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2569 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2570
2571 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2572 template bounds.
2573
2574 ??? This is now the only use of gnat_substitute_in_type, which
2575 is now a very "heavy" routine to do this, so it should be replaced
2576 at some point. */
2577 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2578 new_ref = build (COMPONENT_REF, ptr_temp_type,
2579 build (PLACEHOLDER_EXPR, ptr),
2580 TREE_CHAIN (TYPE_FIELDS (ptr)), NULL_TREE);
2581
2582 update_pointer_to
2583 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2584 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2585 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2586
2587 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2588 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2589
2590 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2591 = TREE_TYPE (new_type) = ptr;
2592
2593 /* Now handle updating the allocation record, i.e. what the thin pointer
2594 points to. Update all pointers from the old record into the new
2595 one, update the types of the fields, and recompute the size. */
2596
2597 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2598
2599 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2600 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2601 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2602 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2603 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2604 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2605 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2606
2607 TYPE_SIZE (new_obj_rec)
2608 = size_binop (PLUS_EXPR,
2609 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2610 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2611 TYPE_SIZE_UNIT (new_obj_rec)
2612 = size_binop (PLUS_EXPR,
2613 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2614 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2615 rest_of_type_compilation (ptr, global_bindings_p ());
2616 }
2617 }
2618 \f
2619 /* Convert a pointer to a constrained array into a fat pointer.
2620 This involves making or finding a template. */
2621
2622 static tree
2623 convert_to_fat_pointer (tree type, tree expr)
2624 {
2625 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2626 tree template, template_addr;
2627 tree etype = TREE_TYPE (expr);
2628
2629 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2630 pointer to the template and array. */
2631 if (integer_zerop (expr))
2632 return
2633 gnat_build_constructor
2634 (type,
2635 tree_cons (TYPE_FIELDS (type),
2636 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2637 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2638 convert (build_pointer_type (template_type),
2639 expr),
2640 NULL_TREE)));
2641
2642 /* If EXPR is a thin pointer, make the template and data from the record. */
2643
2644 else if (TYPE_THIN_POINTER_P (etype))
2645 {
2646 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2647
2648 expr = save_expr (expr);
2649 if (TREE_CODE (expr) == ADDR_EXPR)
2650 expr = TREE_OPERAND (expr, 0);
2651 else
2652 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2653
2654 template = build_component_ref (expr, NULL_TREE, fields, 0);
2655 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2656 build_component_ref (expr, NULL_TREE,
2657 TREE_CHAIN (fields), 0));
2658 }
2659 else
2660 /* Otherwise, build the constructor for the template. */
2661 template = build_template (template_type, TREE_TYPE (etype), expr);
2662
2663 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2664
2665 /* The result is a CONSTRUCTOR for the fat pointer.
2666
2667 If expr is an argument of a foreign convention subprogram, the type it
2668 points to is directly the component type. In this case, the expression
2669 type may not match the corresponding FIELD_DECL type at this point, so we
2670 call "convert" here to fix that up if necessary. This type consistency is
2671 required, for instance because it ensures that possible later folding of
2672 component_refs against this constructor always yields something of the
2673 same type as the initial reference.
2674
2675 Note that the call to "build_template" above is still fine, because it
2676 will only refer to the provided template_type in this case. */
2677 return
2678 gnat_build_constructor
2679 (type, tree_cons (TYPE_FIELDS (type),
2680 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2681 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2682 template_addr, NULL_TREE)));
2683 }
2684 \f
2685 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2686 is something that is a fat pointer, so convert EXPR to a fat pointer first
2687 if it is not already one. */
2688
2689 static tree
2690 convert_to_thin_pointer (tree type, tree expr)
2691 {
2692 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2693 expr
2694 = convert_to_fat_pointer
2695 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2696
2697 /* We get the pointer to the data and use a NOP_EXPR to make it the
2698 proper GCC type. */
2699 expr
2700 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2701 expr = build1 (NOP_EXPR, type, expr);
2702
2703 return expr;
2704 }
2705 \f
2706 /* Create an expression whose value is that of EXPR,
2707 converted to type TYPE. The TREE_TYPE of the value
2708 is always TYPE. This function implements all reasonable
2709 conversions; callers should filter out those that are
2710 not permitted by the language being compiled. */
2711
2712 tree
2713 convert (tree type, tree expr)
2714 {
2715 enum tree_code code = TREE_CODE (type);
2716 tree etype = TREE_TYPE (expr);
2717 enum tree_code ecode = TREE_CODE (etype);
2718 tree tem;
2719
2720 /* If EXPR is already the right type, we are done. */
2721 if (type == etype)
2722 return expr;
2723
2724 /* If the input type has padding, remove it by doing a component reference
2725 to the field. If the output type has padding, make a constructor
2726 to build the record. If both input and output have padding and are
2727 of variable size, do this as an unchecked conversion. */
2728 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
2729 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2730 && (! TREE_CONSTANT (TYPE_SIZE (type))
2731 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
2732 ;
2733 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2734 {
2735 /* If we have just converted to this padded type, just get
2736 the inner expression. */
2737 if (TREE_CODE (expr) == CONSTRUCTOR
2738 && CONSTRUCTOR_ELTS (expr) != 0
2739 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2740 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2741 else
2742 return convert (type, build_component_ref (expr, NULL_TREE,
2743 TYPE_FIELDS (etype), 0));
2744 }
2745 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2746 {
2747 /* If we previously converted from another type and our type is
2748 of variable size, remove the conversion to avoid the need for
2749 variable-size temporaries. */
2750 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2751 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2752 expr = TREE_OPERAND (expr, 0);
2753
2754 /* If we are just removing the padding from expr, convert the original
2755 object if we have variable size. That will avoid the need
2756 for some variable-size temporaries. */
2757 if (TREE_CODE (expr) == COMPONENT_REF
2758 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2759 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2760 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2761 return convert (type, TREE_OPERAND (expr, 0));
2762
2763 /* If the result type is a padded type with a self-referentially-sized
2764 field and the expression type is a record, do this as an
2765 unchecked conversion. */
2766 else if (TREE_CODE (etype) == RECORD_TYPE
2767 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
2768 return unchecked_convert (type, expr, 0);
2769
2770 else
2771 return
2772 gnat_build_constructor (type,
2773 tree_cons (TYPE_FIELDS (type),
2774 convert (TREE_TYPE
2775 (TYPE_FIELDS (type)),
2776 expr),
2777 NULL_TREE));
2778 }
2779
2780 /* If the input is a biased type, adjust first. */
2781 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2782 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2783 fold (build1 (NOP_EXPR,
2784 TREE_TYPE (etype), expr)),
2785 TYPE_MIN_VALUE (etype))));
2786
2787 /* If the input is a left-justified modular type, we need to extract
2788 the actual object before converting it to any other type with the
2789 exception of an unconstrained array. */
2790 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2791 && code != UNCONSTRAINED_ARRAY_TYPE)
2792 return convert (type, build_component_ref (expr, NULL_TREE,
2793 TYPE_FIELDS (etype), 0));
2794
2795 /* If converting to a type that contains a template, convert to the data
2796 type and then build the template. */
2797 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
2798 {
2799 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2800
2801 /* If the source already has a template, get a reference to the
2802 associated array only, as we are going to rebuild a template
2803 for the target type anyway. */
2804 expr = maybe_unconstrained_array (expr);
2805
2806 return
2807 gnat_build_constructor
2808 (type,
2809 tree_cons (TYPE_FIELDS (type),
2810 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2811 obj_type, NULL_TREE),
2812 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2813 convert (obj_type, expr), NULL_TREE)));
2814 }
2815
2816 /* There are some kinds of expressions that we process specially. */
2818 switch (TREE_CODE (expr))
2819 {
2820 case ERROR_MARK:
2821 return expr;
2822
2823 case NULL_EXPR:
2824 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2825 conversion in gnat_expand_expr. NULL_EXPR does not represent
2826 an actual value, so no conversion is needed. */
2827 expr = copy_node (expr);
2828 TREE_TYPE (expr) = type;
2829 return expr;
2830
2831 case STRING_CST:
2832 case CONSTRUCTOR:
2833 /* If we are converting a STRING_CST to another constrained array type,
2834 just make a new one in the proper type. Likewise for
2835 CONSTRUCTOR if the alias sets are the same. */
2836 if (code == ecode && AGGREGATE_TYPE_P (etype)
2837 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2838 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2839 && (TREE_CODE (expr) == STRING_CST
2840 || get_alias_set (etype) == get_alias_set (type)))
2841 {
2842 expr = copy_node (expr);
2843 TREE_TYPE (expr) = type;
2844 return expr;
2845 }
2846 break;
2847
2848 case COMPONENT_REF:
2849 /* If we are converting between two aggregate types of the same
2850 kind, size, mode, and alignment, just make a new COMPONENT_REF.
2851 This avoids unneeded conversions, which would make reference
2852 computations more complex. */
2853 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
2854 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2855 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
2856 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0)
2857 && get_alias_set (type) == get_alias_set (etype))
2858 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
2859 TREE_OPERAND (expr, 1), NULL_TREE);
2860
2861 break;
2862
2863 case UNCONSTRAINED_ARRAY_REF:
2864 /* Convert this to the type of the inner array by getting the address of
2865 the array from the template. */
2866 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2867 build_component_ref (TREE_OPERAND (expr, 0),
2868 get_identifier ("P_ARRAY"),
2869 NULL_TREE, 0));
2870 etype = TREE_TYPE (expr);
2871 ecode = TREE_CODE (etype);
2872 break;
2873
2874 case VIEW_CONVERT_EXPR:
2875 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2876 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2877 return convert (type, TREE_OPERAND (expr, 0));
2878 break;
2879
2880 case INDIRECT_REF:
2881 /* If both types are record types, just convert the pointer and
2882 make a new INDIRECT_REF.
2883
2884 ??? Disable this for now since it causes problems with the
2885 code in build_binary_op for MODIFY_EXPR which wants to
2886 strip off conversions. But that code really is a mess and
2887 we need to do this a much better way some time. */
2888 if (0
2889 && (TREE_CODE (type) == RECORD_TYPE
2890 || TREE_CODE (type) == UNION_TYPE)
2891 && (TREE_CODE (etype) == RECORD_TYPE
2892 || TREE_CODE (etype) == UNION_TYPE)
2893 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2894 return build_unary_op (INDIRECT_REF, NULL_TREE,
2895 convert (build_pointer_type (type),
2896 TREE_OPERAND (expr, 0)));
2897 break;
2898
2899 default:
2900 break;
2901 }
2902
2903 /* Check for converting to a pointer to an unconstrained array. */
2904 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
2905 return convert_to_fat_pointer (type, expr);
2906
2907 /* If we're converting between two aggregate types that have the same main
2908 variant, just make a VIEW_CONVERT_EXPR. */
2909 else if (AGGREGATE_TYPE_P (type)
2910 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
2911 return build1 (VIEW_CONVERT_EXPR, type, expr);
2912
2913 /* In all other cases of related types, make a NOP_EXPR. */
2914 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2915 || (code == INTEGER_CST && ecode == INTEGER_CST
2916 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2917 return fold (build1 (NOP_EXPR, type, expr));
2918
2919 switch (code)
2920 {
2921 case VOID_TYPE:
2922 return build1 (CONVERT_EXPR, type, expr);
2923
2924 case BOOLEAN_TYPE:
2925 return fold (build1 (NOP_EXPR, type, gnat_truthvalue_conversion (expr)));
2926
2927 case INTEGER_TYPE:
2928 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2929 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
2930 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
2931 return unchecked_convert (type, expr, 0);
2932 else if (TYPE_BIASED_REPRESENTATION_P (type))
2933 return fold (build1 (CONVERT_EXPR, type,
2934 fold (build (MINUS_EXPR, TREE_TYPE (type),
2935 convert (TREE_TYPE (type), expr),
2936 TYPE_MIN_VALUE (type)))));
2937
2938 /* ... fall through ... */
2939
2940 case ENUMERAL_TYPE:
2941 return fold (convert_to_integer (type, expr));
2942
2943 case POINTER_TYPE:
2944 case REFERENCE_TYPE:
2945 /* If converting between two pointers to records denoting
2946 both a template and type, adjust if needed to account
2947 for any differing offsets, since one might be negative. */
2948 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
2949 {
2950 tree bit_diff
2951 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
2952 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
2953 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
2954 sbitsize_int (BITS_PER_UNIT));
2955
2956 expr = build1 (NOP_EXPR, type, expr);
2957 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
2958 if (integer_zerop (byte_diff))
2959 return expr;
2960
2961 return build_binary_op (PLUS_EXPR, type, expr,
2962 fold (convert_to_pointer (type, byte_diff)));
2963 }
2964
2965 /* If converting to a thin pointer, handle specially. */
2966 if (TYPE_THIN_POINTER_P (type)
2967 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
2968 return convert_to_thin_pointer (type, expr);
2969
2970 /* If converting fat pointer to normal pointer, get the pointer to the
2971 array and then convert it. */
2972 else if (TYPE_FAT_POINTER_P (etype))
2973 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
2974 NULL_TREE, 0);
2975
2976 return fold (convert_to_pointer (type, expr));
2977
2978 case REAL_TYPE:
2979 return fold (convert_to_real (type, expr));
2980
2981 case RECORD_TYPE:
2982 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
2983 return
2984 gnat_build_constructor
2985 (type, tree_cons (TYPE_FIELDS (type),
2986 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2987 NULL_TREE));
2988
2989 /* ... fall through ... */
2990
2991 case ARRAY_TYPE:
2992 /* In these cases, assume the front-end has validated the conversion.
2993 If the conversion is valid, it will be a bit-wise conversion, so
2994 it can be viewed as an unchecked conversion. */
2995 return unchecked_convert (type, expr, 0);
2996
2997 case UNION_TYPE:
2998 /* Just validate that ETYPE is indeed the type of a field
2999 of TYPE. Then make the simple conversion. */
3000 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
3001 {
3002 if (TREE_TYPE (tem) == etype)
3003 return build1 (CONVERT_EXPR, type, expr);
3004 else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
3005 && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
3006 || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
3007 && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
3008 return build1 (CONVERT_EXPR, type,
3009 convert (TREE_TYPE (tem), expr));
3010 }
3011
3012 gigi_abort (413);
3013
3014 case UNCONSTRAINED_ARRAY_TYPE:
3015 /* If EXPR is a constrained array, take its address, convert it to a
3016 fat pointer, and then dereference it. Likewise if EXPR is a
3017 record containing both a template and a constrained array.
3018 Note that a record representing a left-justified modular type
3019 always represents a packed constrained array. */
3020 if (ecode == ARRAY_TYPE
3021 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3022 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3023 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3024 return
3025 build_unary_op
3026 (INDIRECT_REF, NULL_TREE,
3027 convert_to_fat_pointer (TREE_TYPE (type),
3028 build_unary_op (ADDR_EXPR,
3029 NULL_TREE, expr)));
3030
3031 /* Do something very similar for converting one unconstrained
3032 array to another. */
3033 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3034 return
3035 build_unary_op (INDIRECT_REF, NULL_TREE,
3036 convert (TREE_TYPE (type),
3037 build_unary_op (ADDR_EXPR,
3038 NULL_TREE, expr)));
3039 else
3040 gigi_abort (409);
3041
3042 case COMPLEX_TYPE:
3043 return fold (convert_to_complex (type, expr));
3044
3045 default:
3046 gigi_abort (410);
3047 }
3048 }
3049 \f
3050 /* Remove all conversions that are done in EXP. This includes converting
3051 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3052 is nonzero, always return the address of the containing object even if
3053 the address is not bit-aligned. */
3054
3055 tree
3056 remove_conversions (tree exp, int true_address)
3057 {
3058 switch (TREE_CODE (exp))
3059 {
3060 case CONSTRUCTOR:
3061 if (true_address
3062 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3063 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3064 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3065 break;
3066
3067 case COMPONENT_REF:
3068 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3069 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3070 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3071 break;
3072
3073 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3074 case NOP_EXPR: case CONVERT_EXPR:
3075 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3076
3077 default:
3078 break;
3079 }
3080
3081 return exp;
3082 }
3083 \f
3084 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3085 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3086 likewise return an expression pointing to the underlying array. */
3087
3088 tree
3089 maybe_unconstrained_array (tree exp)
3090 {
3091 enum tree_code code = TREE_CODE (exp);
3092 tree new;
3093
3094 switch (TREE_CODE (TREE_TYPE (exp)))
3095 {
3096 case UNCONSTRAINED_ARRAY_TYPE:
3097 if (code == UNCONSTRAINED_ARRAY_REF)
3098 {
3099 new
3100 = build_unary_op (INDIRECT_REF, NULL_TREE,
3101 build_component_ref (TREE_OPERAND (exp, 0),
3102 get_identifier ("P_ARRAY"),
3103 NULL_TREE, 0));
3104 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3105 return new;
3106 }
3107
3108 else if (code == NULL_EXPR)
3109 return build1 (NULL_EXPR,
3110 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3111 (TREE_TYPE (TREE_TYPE (exp))))),
3112 TREE_OPERAND (exp, 0));
3113
3114 case RECORD_TYPE:
3115 /* If this is a padded type, convert to the unpadded type and see if
3116 it contains a template. */
3117 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3118 {
3119 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3120 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3121 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3122 return
3123 build_component_ref (new, NULL_TREE,
3124 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3125 0);
3126 }
3127 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3128 return
3129 build_component_ref (exp, NULL_TREE,
3130 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3131 break;
3132
3133 default:
3134 break;
3135 }
3136
3137 return exp;
3138 }
3139 \f
3140 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3141 If NOTRUNC_P is set, truncation operations should be suppressed. */
3142
3143 tree
3144 unchecked_convert (tree type, tree expr, int notrunc_p)
3145 {
3146 tree etype = TREE_TYPE (expr);
3147
3148 /* If the expression is already the right type, we are done. */
3149 if (etype == type)
3150 return expr;
3151
3152 /* If both types are integral, just do a normal conversion.
3153 Likewise for a conversion to an unconstrained array. */
3154 if ((((INTEGRAL_TYPE_P (type)
3155 && ! (TREE_CODE (type) == INTEGER_TYPE
3156 && TYPE_VAX_FLOATING_POINT_P (type)))
3157 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3158 || (TREE_CODE (type) == RECORD_TYPE
3159 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3160 && ((INTEGRAL_TYPE_P (etype)
3161 && ! (TREE_CODE (etype) == INTEGER_TYPE
3162 && TYPE_VAX_FLOATING_POINT_P (etype)))
3163 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3164 || (TREE_CODE (etype) == RECORD_TYPE
3165 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3166 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3167 {
3168 tree rtype = type;
3169
3170 if (TREE_CODE (etype) == INTEGER_TYPE
3171 && TYPE_BIASED_REPRESENTATION_P (etype))
3172 {
3173 tree ntype = copy_type (etype);
3174
3175 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3176 TYPE_MAIN_VARIANT (ntype) = ntype;
3177 expr = build1 (NOP_EXPR, ntype, expr);
3178 }
3179
3180 if (TREE_CODE (type) == INTEGER_TYPE
3181 && TYPE_BIASED_REPRESENTATION_P (type))
3182 {
3183 rtype = copy_type (type);
3184 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3185 TYPE_MAIN_VARIANT (rtype) = rtype;
3186 }
3187
3188 expr = convert (rtype, expr);
3189 if (type != rtype)
3190 expr = build1 (NOP_EXPR, type, expr);
3191 }
3192
3193 /* If we are converting TO an integral type whose precision is not the
3194 same as its size, first unchecked convert to a record that contains
3195 an object of the output type. Then extract the field. */
3196 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3197 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3198 GET_MODE_BITSIZE (TYPE_MODE (type))))
3199 {
3200 tree rec_type = make_node (RECORD_TYPE);
3201 tree field = create_field_decl (get_identifier ("OBJ"), type,
3202 rec_type, 1, 0, 0, 0);
3203
3204 TYPE_FIELDS (rec_type) = field;
3205 layout_type (rec_type);
3206
3207 expr = unchecked_convert (rec_type, expr, notrunc_p);
3208 expr = build_component_ref (expr, NULL_TREE, field, 0);
3209 }
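  /* An illustration of the record trick above, with hypothetical sizes:
     suppose the target integer type has a 32-bit mode but a TYPE_RM_SIZE of
     24.  Converting straight to that type would operate on all 32 bits of
     the container, so the conversion is instead routed through a record
     with a single packed field of the 24-bit type; the unchecked conversion
     is done on the record and the OBJ field is then extracted, confining
     the view to the 24 significant bits.  */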
3210
3211 /* Similarly for an integral input type whose precision is not equal to its
3212 size. */
3213 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3214 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3215 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3216 {
3217 tree rec_type = make_node (RECORD_TYPE);
3218 tree field
3219 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3220 1, 0, 0, 0);
3221
3222 TYPE_FIELDS (rec_type) = field;
3223 layout_type (rec_type);
3224
3225 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3226 expr = unchecked_convert (type, expr, notrunc_p);
3227 }
3228
3229 /* We have a special case when we are converting between two
3230 unconstrained array types. In that case, take the address,
3231 convert the fat pointer types, and dereference. */
3232 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3233 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3234 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3235 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3236 build_unary_op (ADDR_EXPR, NULL_TREE,
3237 expr)));
3238 else
3239 {
3240 expr = maybe_unconstrained_array (expr);
3241 etype = TREE_TYPE (expr);
3242 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3243 }
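  /* To make the fat-pointer dance in the unconstrained-to-unconstrained
     branch above concrete, consider a hypothetical unchecked conversion
     between two Ada types of the form "array (Positive range <>) of ...".
     Taking the ADDR_EXPR of the source yields its fat pointer (array
     pointer plus bounds pointer); that fat pointer is view-converted to
     TREE_TYPE (type), the corresponding pointer type of the target, and
     the INDIRECT_REF then gives the same object seen as the target
     unconstrained array.  */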
3244
3245 /* If the result is an integral type whose size is not equal to
3246 the size of the underlying machine type, sign- or zero-extend
3247 the result. We need not do this if the input is an integral type
3248 of the same precision and signedness, if the output is a biased type,
3249 or if both the input and output are unsigned. */
3250 if (! notrunc_p
3251 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3252 && ! (TREE_CODE (type) == INTEGER_TYPE
3253 && TYPE_BIASED_REPRESENTATION_P (type))
3254 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3255 GET_MODE_BITSIZE (TYPE_MODE (type)))
3256 && ! (INTEGRAL_TYPE_P (etype)
3257 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3258 && operand_equal_p (TYPE_RM_SIZE (type),
3259 (TYPE_RM_SIZE (etype) != 0
3260 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3261 0))
3262 && ! (TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3263 {
3264 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3265 TYPE_UNSIGNED (type));
3266 tree shift_expr
3267 = convert (base_type,
3268 size_binop (MINUS_EXPR,
3269 bitsize_int
3270 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3271 TYPE_RM_SIZE (type)));
3272 expr
3273 = convert (type,
3274 build_binary_op (RSHIFT_EXPR, base_type,
3275 build_binary_op (LSHIFT_EXPR, base_type,
3276 convert (base_type, expr),
3277 shift_expr),
3278 shift_expr));
3279 }
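  /* A worked example of the extension above, with hypothetical sizes: if
     TYPE_RM_SIZE (type) is 5 and the mode is 32 bits wide, shift_expr is
     32 - 5 = 27.  Shifting the value left by 27 moves bit 4 of the 5-bit
     field into the top bit of the word; the right shift by 27 then
     propagates that bit (for a signed base type) or zeroes (for an
     unsigned one) through the upper 27 bits, producing a properly sign-
     or zero-extended value.  */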
3280
3281 /* An unchecked conversion should never raise Constraint_Error. The code
3282 below assumes that GCC's conversion routines overflow the same way that
3283 the underlying hardware does. This is probably true. In the rare case
3284 when it is false, we can rely on the fact that such conversions are
3285 erroneous anyway. */
3286 if (TREE_CODE (expr) == INTEGER_CST)
3287 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3288
3289 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3290 mark the expression as no longer constant. */
3291 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3292 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3293 OEP_ONLY_CONST))
3294 TREE_CONSTANT (expr) = 0;
3295
3296 return expr;
3297 }
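/* The fragment below is an illustrative, non-compiled sketch of a typical
   call to the routine above; gnu_result, gnu_target_type and gnu_expr are
   hypothetical names standing for trees supplied by a caller such as the
   translation of an N_Unchecked_Type_Conversion node.  */
#if 0
  gnu_result = unchecked_convert (gnu_target_type, gnu_expr,
                                  0 /* do not suppress truncation */);
#endif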
3298
3299 #include "gt-ada-utils.h"
3300 #include "gtype-ada.h"