re PR c/14516 (-fleading-underscore does not work correctly for file static variables)
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41 #include "cgraph.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
45
46 #include "ada.h"
47 #include "types.h"
48 #include "atree.h"
49 #include "elists.h"
50 #include "namet.h"
51 #include "nlists.h"
52 #include "stringt.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
62 #endif
63
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 #endif
67
68 /* If nonzero, pretend we are allocating at global level. */
69 int force_global;
70
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
73
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
76
77 /* Associates a GNAT tree node to a GCC tree node. It is used in
78 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
79 of `save_gnu_tree' for more info. */
80 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
81
82 /* This variable keeps a table for types for each precision so that we only
83 allocate each of them once. Signed and unsigned types are kept separate.
84
85 Note that these types are only used when fold-const requests something
86 special. Perhaps we should NOT share these types; we'll see how it
87 goes later. */
88 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
89
90 /* Likewise for float types, but record these by mode. */
91 static GTY(()) tree float_types[NUM_MACHINE_MODES];
92
/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  The levels form a stack linked through CHAIN; they
   live in GC-managed storage, and the GTY chain_next option lets the
   garbage collector walk the stack iteratively instead of recursing.  */

struct gnat_binding_level GTY((chain_next ("%h.chain")))
{
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};
106
107 /* The binding level currently in effect. */
108 static GTY(()) struct gnat_binding_level *current_binding_level;
109
110 /* A chain of gnat_binding_level structures awaiting reuse. */
111 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
112
113 /* A chain of unused BLOCK nodes. */
114 static GTY((deletable)) tree free_block_chain;
115
/* Per-function language-specific data hooked onto struct function.  GNAT
   currently keeps nothing here, but the structure must exist for the
   front-end interface; UNUSED is a placeholder since an empty struct is
   not valid C.  */
struct language_function GTY(())
{
  int unused;
};
120
121 static void gnat_define_builtin (const char *, tree, int, const char *, bool);
122 static void gnat_install_builtins (void);
123 static tree merge_sizes (tree, tree, tree, bool, bool);
124 static tree compute_related_constant (tree, tree);
125 static tree split_plus (tree, tree *);
126 static bool value_zerop (tree);
127 static void gnat_gimplify_function (tree);
128 static void gnat_finalize (tree);
129 static tree float_type_for_precision (int, enum machine_mode);
130 static tree convert_to_fat_pointer (tree, tree);
131 static tree convert_to_thin_pointer (tree, tree);
132 static tree make_descriptor_field (const char *,tree, tree, tree);
133 static bool value_factor_p (tree, HOST_WIDE_INT);
134 static bool potential_alignment_gap (tree, tree, tree);
135 \f
136 /* Initialize the association of GNAT nodes to GCC trees. */
137
138 void
139 init_gnat_to_gnu (void)
140 {
141 associate_gnat_to_gnu
142 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
143 }
144
145 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
146 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
147 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
148
149 If GNU_DECL is zero, a previous association is to be reset. */
150
151 void
152 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
153 {
154 /* Check that GNAT_ENTITY is not already defined and that it is being set
155 to something which is a decl. Raise gigi 401 if not. Usually, this
156 means GNAT_ENTITY is defined twice, but occasionally is due to some
157 Gigi problem. */
158 if (gnu_decl
159 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
160 || (!no_check && !DECL_P (gnu_decl))))
161 abort ();
162
163 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
164 }
165
166 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
167 Return the ..._DECL node that was associated with it. If there is no tree
168 node associated with GNAT_ENTITY, abort.
169
170 In some cases, such as delayed elaboration or expressions that need to
171 be elaborated only once, GNAT_ENTITY is really not an entity. */
172
173 tree
174 get_gnu_tree (Entity_Id gnat_entity)
175 {
176 if (!associate_gnat_to_gnu[gnat_entity - First_Node_Id])
177 abort ();
178
179 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
180 }
181
182 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
183
184 bool
185 present_gnu_tree (Entity_Id gnat_entity)
186 {
187 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id]) != 0;
188 }
189
190 \f
191 /* Return non-zero if we are currently in the global binding level. */
192
193 int
194 global_bindings_p (void)
195 {
196 return (force_global || !current_binding_level
197 || !current_binding_level->chain ? -1 : 0);
198 }
199
200 /* Enter a new binding level. */
201
202 void
203 gnat_pushlevel ()
204 {
205 struct gnat_binding_level *newlevel = NULL;
206
207 /* Reuse a struct for this binding level, if there is one. */
208 if (free_binding_level)
209 {
210 newlevel = free_binding_level;
211 free_binding_level = free_binding_level->chain;
212 }
213 else
214 newlevel
215 = (struct gnat_binding_level *)
216 ggc_alloc (sizeof (struct gnat_binding_level));
217
218 /* Use a free BLOCK, if any; otherwise, allocate one. */
219 if (free_block_chain)
220 {
221 newlevel->block = free_block_chain;
222 free_block_chain = TREE_CHAIN (free_block_chain);
223 TREE_CHAIN (newlevel->block) = NULL_TREE;
224 }
225 else
226 newlevel->block = make_node (BLOCK);
227
228 /* Point the BLOCK we just made to its parent. */
229 if (current_binding_level)
230 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
231
232 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
233 TREE_USED (newlevel->block) = 1;
234
235 /* Add this level to the front of the chain (stack) of levels that are
236 active. */
237 newlevel->chain = current_binding_level;
238 newlevel->jmpbuf_decl = NULL_TREE;
239 current_binding_level = newlevel;
240 }
241
242 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
243 and point FNDECL to this BLOCK. */
244
245 void
246 set_current_block_context (tree fndecl)
247 {
248 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
249 DECL_INITIAL (fndecl) = current_binding_level->block;
250 }
251
252 /* Set the jmpbuf_decl for the current binding level to DECL. */
253
254 void
255 set_block_jmpbuf_decl (tree decl)
256 {
257 current_binding_level->jmpbuf_decl = decl;
258 }
259
260 /* Get the jmpbuf_decl, if any, for the current binding level. */
261
262 tree
263 get_block_jmpbuf_decl ()
264 {
265 return current_binding_level->jmpbuf_decl;
266 }
267
268 /* Exit a binding level. Set any BLOCK into the current code group. */
269
270 void
271 gnat_poplevel ()
272 {
273 struct gnat_binding_level *level = current_binding_level;
274 tree block = level->block;
275
276 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
277 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
278
279 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
280 are no variables free the block and merge its subblocks into those of its
281 parent block. Otherwise, add it to the list of its parent. */
282 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
283 ;
284 else if (BLOCK_VARS (block) == NULL_TREE)
285 {
286 BLOCK_SUBBLOCKS (level->chain->block)
287 = chainon (BLOCK_SUBBLOCKS (block),
288 BLOCK_SUBBLOCKS (level->chain->block));
289 TREE_CHAIN (block) = free_block_chain;
290 free_block_chain = block;
291 }
292 else
293 {
294 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
295 BLOCK_SUBBLOCKS (level->chain->block) = block;
296 TREE_USED (block) = 1;
297 set_block_for_group (block);
298 }
299
300 /* Free this binding structure. */
301 current_binding_level = level->chain;
302 level->chain = free_binding_level;
303 free_binding_level = level;
304 }
305
306 /* Insert BLOCK at the end of the list of subblocks of the
307 current binding level. This is used when a BIND_EXPR is expanded,
308 to handle the BLOCK node inside the BIND_EXPR. */
309
310 void
311 insert_block (tree block)
312 {
313 TREE_USED (block) = 1;
314 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
315 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
316 }
317 \f
318 /* Records a ..._DECL node DECL as belonging to the current lexical scope
319 and uses GNAT_NODE for location information. */
320
321 void
322 gnat_pushdecl (tree decl, Node_Id gnat_node)
323 {
324 /* If at top level, there is no context. But PARM_DECLs always go in the
325 level of its function. */
326 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
327 DECL_CONTEXT (decl) = 0;
328 else
329 DECL_CONTEXT (decl) = current_function_decl;
330
331 /* Set the location of DECL and emit a declaration for it. */
332 if (Present (gnat_node))
333 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
334 add_decl_expr (decl, gnat_node);
335
336 /* Put the declaration on the list. The list of declarations is in reverse
337 order. The list will be reversed later. We don't do this for global
338 variables. Also, don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
339 the list. They will cause trouble with the debugger and aren't needed
340 anyway. */
341 if (!global_bindings_p ()
342 && (TREE_CODE (decl) != TYPE_DECL
343 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE))
344 {
345 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
346 BLOCK_VARS (current_binding_level->block) = decl;
347 }
348
349 /* For the declaration of a type, set its name if it either is not already
350 set, was set to an IDENTIFIER_NODE, indicating an internal name,
351 or if the previous type name was not derived from a source name.
352 We'd rather have the type named with a real name and all the pointer
353 types to the same object have the same POINTER_TYPE node. Code in this
354 function in c-decl.c makes a copy of the type node here, but that may
355 cause us trouble with incomplete types, so let's not try it (at least
356 for now). */
357
358 if (TREE_CODE (decl) == TYPE_DECL
359 && DECL_NAME (decl)
360 && (!TYPE_NAME (TREE_TYPE (decl))
361 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
362 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
363 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
364 && !DECL_ARTIFICIAL (decl))))
365 TYPE_NAME (TREE_TYPE (decl)) = decl;
366
367 if (TREE_CODE (decl) != CONST_DECL)
368 rest_of_decl_compilation (decl, NULL, global_bindings_p (), 0);
369 }
370 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  NOTE: the statement order below is load-bearing:
   sizetype must be chosen before build_common_tree_nodes_2 builds the
   size-dependent common nodes.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  gnat_pushlevel ();

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
  set_sizetype (size_type_node);
  build_common_tree_nodes_2 (0);

  /* Give names and make TYPE_DECLs for common types.  */
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype),
		 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			     integer_type_node),
		 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			     char_type_node),
		 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("long integer"),
			     long_integer_type_node),
		 Empty);

  /* The generic void* used throughout gigi for untyped pointers.  */
  ptr_void_type_node = build_pointer_type (void_type_node);

  /* Register the builtins the middle-end expects (memcpy, stack_save...).  */
  gnat_install_builtins ();
}
413
414 /* Define a builtin function. This is temporary and is just being done
415 to initialize implicit_built_in_decls for the middle-end. We'll want
416 to do full builtin processing soon. */
417
418 static void
419 gnat_define_builtin (const char *name, tree type,
420 int function_code, const char *library_name, bool const_p)
421 {
422 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
423
424 DECL_EXTERNAL (decl) = 1;
425 TREE_PUBLIC (decl) = 1;
426 if (library_name)
427 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
428 make_decl_rtl (decl, NULL);
429 gnat_pushdecl (decl, Empty);
430 DECL_BUILT_IN_CLASS (decl) = BUILT_IN_NORMAL;
431 DECL_FUNCTION_CODE (decl) = function_code;
432 TREE_READONLY (decl) = const_p;
433
434 implicit_built_in_decls[function_code] = decl;
435 }
436
437 /* Install the builtin functions the middle-end needs. */
438
439 static void
440 gnat_install_builtins ()
441 {
442 tree ftype;
443 tree tmp;
444
445 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
446 tmp = tree_cons (NULL_TREE, long_integer_type_node, tmp);
447 ftype = build_function_type (long_integer_type_node, tmp);
448 gnat_define_builtin ("__builtin_expect", ftype, BUILT_IN_EXPECT,
449 "__builtin_expect", true);
450
451 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
452 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
453 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
454 ftype = build_function_type (ptr_void_type_node, tmp);
455 gnat_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
456 "memcpy", false);
457
458 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
459 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
460 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
461 ftype = build_function_type (integer_type_node, tmp);
462 gnat_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
463 "memcmp", false);
464
465 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
466 tmp = tree_cons (NULL_TREE, integer_type_node, tmp);
467 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
468 ftype = build_function_type (integer_type_node, tmp);
469 gnat_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
470 "memset", false);
471
472 tmp = tree_cons (NULL_TREE, integer_type_node, void_list_node);
473 ftype = build_function_type (integer_type_node, tmp);
474 gnat_define_builtin ("__builtin_clz", ftype, BUILT_IN_CLZ, "clz", true);
475
476 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
477 ftype = build_function_type (integer_type_node, tmp);
478 gnat_define_builtin ("__builtin_clzl", ftype, BUILT_IN_CLZL, "clzl", true);
479
480 tmp = tree_cons (NULL_TREE, long_long_integer_type_node, void_list_node);
481 ftype = build_function_type (integer_type_node, tmp);
482 gnat_define_builtin ("__builtin_clzll", ftype, BUILT_IN_CLZLL, "clzll",
483 true);
484
485 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
486 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
487 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
488 ftype = build_function_type (void_type_node, tmp);
489 gnat_define_builtin ("__builtin_init_trampoline", ftype,
490 BUILT_IN_INIT_TRAMPOLINE, "init_trampoline", false);
491
492 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
493 ftype = build_function_type (ptr_void_type_node, tmp);
494 gnat_define_builtin ("__builtin_adjust_trampoline", ftype,
495 BUILT_IN_ADJUST_TRAMPOLINE, "adjust_trampoline", true);
496
497 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
498 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
499 ftype = build_function_type (ptr_void_type_node, tmp);
500 gnat_define_builtin ("__builtin_stack_alloc", ftype, BUILT_IN_STACK_ALLOC,
501 "stack_alloc", false);
502
503 /* The stack_save and stack_restore builtins aren't used directly. They
504 are inserted during gimplification to implement stack_alloc calls. */
505 ftype = build_function_type (ptr_void_type_node, void_list_node);
506 gnat_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
507 "stack_save", false);
508 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
509 ftype = build_function_type (void_type_node, tmp);
510 gnat_define_builtin ("__builtin_stack_restore", ftype,
511 BUILT_IN_STACK_RESTORE, "stack_restore", false);
512 }
513
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.
   LONG_LONG_FLOAT_TYPE and EXCEPTION_TYPE are trees supplied by the front
   end; their TREE_TYPEs seed longest_float_type_node and except_type_node.
   Also declares the runtime subprograms (allocation, exception handling,
   setjmp support) that gigi generates calls to.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      create_type_decl (get_identifier ("longest float type"),
			longest_float_type_node, NULL, false, true, Empty);
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
		    NULL, false, true, Empty);

  void_type_decl_node = create_type_decl (get_identifier ("void"),
					  void_type_node, NULL, false, true,
					  Empty);

  /* void f(void) function type, and pointer-to-it, used below.  */
  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  ENDLINK terminates each tree_cons
     argument list built below.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, false, true, true, NULL,
				     Empty);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, false, true, true, NULL, Empty);

  /* Make the types and functions used for exception processing.  The jump
     buffer is an array of six Pmode-sized slots (index type 0 .. 5).  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
		    false, true, Empty);
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
     NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
     NULL_TREE, false, true, true, NULL, Empty);

  set_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
     NULL_TREE,
     build_function_type (void_type_node,
			  tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
     NULL_TREE, false, true, true, NULL, Empty);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_gnat_exception"),
     NULL_TREE,
     build_function_type (build_pointer_type (except_type_node), NULL_TREE),
     NULL_TREE, false, true, true, NULL, Empty);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, false, true, true, NULL, Empty);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, false, true, true, NULL, Empty);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      /* One shared decl serves every raise reason.  */
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, false, true, true, NULL, Empty);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	/* 17 = strlen ("__gnat_rcheck_") + 2 digits + NUL.  */
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, false, true, true, NULL, Empty);
      }

  /* Indicate that these never return: volatile/side-effecting, with a
     volatile-qualified function type.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE,  jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
     address.  */
  update_setjmp_buf_decl
    = create_subprog_decl
      (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,  jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;

  main_identifier_node = get_identifier ("main");
}
714 \f
715 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL nodes
716 (FIELDLIST), finish constructing the record or union type. If HAS_REP is
717 true, this record has a rep clause; don't call layout_type but merely set
718 the size and alignment ourselves. If DEFER_DEBUG is true, do not call
719 the debugging routines on this type; it will be done later. */
720
721 void
722 finish_record_type (tree record_type, tree fieldlist, bool has_rep,
723 bool defer_debug)
724 {
725 enum tree_code code = TREE_CODE (record_type);
726 tree ada_size = bitsize_zero_node;
727 tree size = bitsize_zero_node;
728 tree size_unit = size_zero_node;
729 bool var_size = false;
730 tree field;
731
732 TYPE_FIELDS (record_type) = fieldlist;
733 TYPE_STUB_DECL (record_type)
734 = build_decl (TYPE_DECL, NULL_TREE, record_type);
735
736 /* We don't need both the typedef name and the record name output in
737 the debugging information, since they are the same. */
738 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
739
740 /* Globally initialize the record first. If this is a rep'ed record,
741 that just means some initializations; otherwise, layout the record. */
742
743 if (has_rep)
744 {
745 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
746 TYPE_MODE (record_type) = BLKmode;
747 if (!TYPE_SIZE (record_type))
748 {
749 TYPE_SIZE (record_type) = bitsize_zero_node;
750 TYPE_SIZE_UNIT (record_type) = size_zero_node;
751 }
752 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
753 out just like a UNION_TYPE, since the size will be fixed. */
754 else if (code == QUAL_UNION_TYPE)
755 code = UNION_TYPE;
756 }
757 else
758 {
759 /* Ensure there isn't a size already set. There can be in an error
760 case where there is a rep clause but all fields have errors and
761 no longer have a position. */
762 TYPE_SIZE (record_type) = 0;
763 layout_type (record_type);
764 }
765
766 /* At this point, the position and size of each field is known. It was
767 either set before entry by a rep clause, or by laying out the type above.
768
769 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
770 to compute the Ada size; the GCC size and alignment (for rep'ed records
771 that are not padding types); and the mode (for rep'ed records). We also
772 clear the DECL_BIT_FIELD indication for the cases we know have not been
773 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
774
775 if (code == QUAL_UNION_TYPE)
776 fieldlist = nreverse (fieldlist);
777
778 for (field = fieldlist; field; field = TREE_CHAIN (field))
779 {
780 tree pos = bit_position (field);
781
782 tree type = TREE_TYPE (field);
783 tree this_size = DECL_SIZE (field);
784 tree this_size_unit = DECL_SIZE_UNIT (field);
785 tree this_ada_size = DECL_SIZE (field);
786
787 /* We need to make an XVE/XVU record if any field has variable size,
788 whether or not the record does. For example, if we have an union,
789 it may be that all fields, rounded up to the alignment, have the
790 same size, in which case we'll use that size. But the debug
791 output routines (except Dwarf2) won't be able to output the fields,
792 so we need to make the special record. */
793 if (TREE_CODE (this_size) != INTEGER_CST)
794 var_size = true;
795
796 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
797 || TREE_CODE (type) == QUAL_UNION_TYPE)
798 && !TYPE_IS_FAT_POINTER_P (type)
799 && !TYPE_CONTAINS_TEMPLATE_P (type)
800 && TYPE_ADA_SIZE (type))
801 this_ada_size = TYPE_ADA_SIZE (type);
802
803 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
804 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
805 && value_factor_p (pos, BITS_PER_UNIT)
806 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
807 DECL_BIT_FIELD (field) = 0;
808
809 /* If we still have DECL_BIT_FIELD set at this point, we know the field
810 is technically not addressable. Except that it can actually be
811 addressed if the field is BLKmode and happens to be properly
812 aligned. */
813 DECL_NONADDRESSABLE_P (field)
814 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
815
816 if (has_rep && !DECL_BIT_FIELD (field))
817 TYPE_ALIGN (record_type)
818 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
819
820 switch (code)
821 {
822 case UNION_TYPE:
823 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
824 size = size_binop (MAX_EXPR, size, this_size);
825 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
826 break;
827
828 case QUAL_UNION_TYPE:
829 ada_size
830 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
831 this_ada_size, ada_size));
832 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
833 this_size, size));
834 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
835 this_size_unit, size_unit));
836 break;
837
838 case RECORD_TYPE:
839 /* Since we know here that all fields are sorted in order of
840 increasing bit position, the size of the record is one
841 higher than the ending bit of the last field processed
842 unless we have a rep clause, since in that case we might
843 have a field outside a QUAL_UNION_TYPE that has a higher ending
844 position. So use a MAX in that case. Also, if this field is a
845 QUAL_UNION_TYPE, we need to take into account the previous size in
846 the case of empty variants. */
847 ada_size
848 = merge_sizes (ada_size, pos, this_ada_size,
849 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
850 size = merge_sizes (size, pos, this_size,
851 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
852 size_unit
853 = merge_sizes (size_unit, byte_position (field), this_size_unit,
854 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
855 break;
856
857 default:
858 abort ();
859 }
860 }
861
862 if (code == QUAL_UNION_TYPE)
863 nreverse (fieldlist);
864
865 /* If this is a padding record, we never want to make the size smaller than
866 what was specified in it, if any. */
867 if (TREE_CODE (record_type) == RECORD_TYPE
868 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
869 {
870 size = TYPE_SIZE (record_type);
871 size_unit = TYPE_SIZE_UNIT (record_type);
872 }
873
874 /* Now set any of the values we've just computed that apply. */
875 if (!TYPE_IS_FAT_POINTER_P (record_type)
876 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
877 SET_TYPE_ADA_SIZE (record_type, ada_size);
878
879 if (has_rep)
880 {
881 if (!(TREE_CODE (record_type) == RECORD_TYPE
882 && TYPE_IS_PADDING_P (record_type)
883 && CONTAINS_PLACEHOLDER_P (size)))
884 {
885 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
886 TYPE_SIZE_UNIT (record_type)
887 = round_up (size_unit,
888 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
889 }
890
891 compute_record_mode (record_type);
892 }
893
894 if (!defer_debug)
895 {
896 /* If this record is of variable size, rename it so that the
897 debugger knows it is and make a new, parallel, record
898 that tells the debugger how the record is laid out. See
899 exp_dbug.ads. But don't do this for records that are padding
900 since they confuse GDB. */
901 if (var_size
902 && !(TREE_CODE (record_type) == RECORD_TYPE
903 && TYPE_IS_PADDING_P (record_type)))
904 {
905 tree new_record_type
906 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
907 ? UNION_TYPE : TREE_CODE (record_type));
908 tree orig_name = TYPE_NAME (record_type);
909 tree orig_id
910 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
911 : orig_name);
912 tree new_id
913 = concat_id_with_name (orig_id,
914 TREE_CODE (record_type) == QUAL_UNION_TYPE
915 ? "XVU" : "XVE");
916 tree last_pos = bitsize_zero_node;
917 tree old_field;
918 tree prev_old_field = 0;
919
920 TYPE_NAME (new_record_type) = new_id;
921 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
922 TYPE_STUB_DECL (new_record_type)
923 = build_decl (TYPE_DECL, NULL_TREE, new_record_type);
924 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
925 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
926 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
927 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
928 TYPE_SIZE_UNIT (new_record_type)
929 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
930
931 /* Now scan all the fields, replacing each field with a new
932 field corresponding to the new encoding. */
933 for (old_field = TYPE_FIELDS (record_type); old_field;
934 old_field = TREE_CHAIN (old_field))
935 {
936 tree field_type = TREE_TYPE (old_field);
937 tree field_name = DECL_NAME (old_field);
938 tree new_field;
939 tree curpos = bit_position (old_field);
940 bool var = false;
941 unsigned int align = 0;
942 tree pos;
943
944 /* See how the position was modified from the last position.
945
946 There are two basic cases we support: a value was added
947 to the last position or the last position was rounded to
948 a boundary and they something was added. Check for the
949 first case first. If not, see if there is any evidence
950 of rounding. If so, round the last position and try
951 again.
952
953 If this is a union, the position can be taken as zero. */
954
955 if (TREE_CODE (new_record_type) == UNION_TYPE)
956 pos = bitsize_zero_node, align = 0;
957 else
958 pos = compute_related_constant (curpos, last_pos);
959
960 if (!pos && TREE_CODE (curpos) == MULT_EXPR
961 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
962 {
963 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
964 pos = compute_related_constant (curpos,
965 round_up (last_pos, align));
966 }
967 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
968 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
969 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
970 && host_integerp (TREE_OPERAND
971 (TREE_OPERAND (curpos, 0), 1),
972 1))
973 {
974 align
975 = tree_low_cst
976 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
977 pos = compute_related_constant (curpos,
978 round_up (last_pos, align));
979 }
980 else if (potential_alignment_gap (prev_old_field, old_field,
981 pos))
982 {
983 align = TYPE_ALIGN (field_type);
984 pos = compute_related_constant (curpos,
985 round_up (last_pos, align));
986 }
987
988 /* If we can't compute a position, set it to zero.
989
990 ??? We really should abort here, but it's too much work
991 to get this correct for all cases. */
992
993 if (!pos)
994 pos = bitsize_zero_node;
995
996 /* See if this type is variable-size and make a new type
997 and indicate the indirection if so. */
998 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
999 {
1000 field_type = build_pointer_type (field_type);
1001 var = true;
1002 }
1003
1004 /* Make a new field name, if necessary. */
1005 if (var || align != 0)
1006 {
1007 char suffix[6];
1008
1009 if (align != 0)
1010 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1011 align / BITS_PER_UNIT);
1012 else
1013 strcpy (suffix, "XVL");
1014
1015 field_name = concat_id_with_name (field_name, suffix);
1016 }
1017
1018 new_field = create_field_decl (field_name, field_type,
1019 new_record_type, 0,
1020 DECL_SIZE (old_field), pos, 0);
1021 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1022 TYPE_FIELDS (new_record_type) = new_field;
1023
1024 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1025 zero. The only time it's not the last field of the record
1026 is when there are other components at fixed positions after
1027 it (meaning there was a rep clause for every field) and we
1028 want to be able to encode them. */
1029 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1030 (TREE_CODE (TREE_TYPE (old_field))
1031 == QUAL_UNION_TYPE)
1032 ? bitsize_zero_node
1033 : DECL_SIZE (old_field));
1034 prev_old_field = old_field;
1035 }
1036
1037 TYPE_FIELDS (new_record_type)
1038 = nreverse (TYPE_FIELDS (new_record_type));
1039
1040 rest_of_type_compilation (new_record_type, global_bindings_p ());
1041 }
1042
1043 rest_of_type_compilation (record_type, global_bindings_p ());
1044 }
1045 }
1046
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      /* Ordinary field: it ends at FIRST_BIT + SIZE.  With a rep clause,
	 fields may lie out of order, so the record size is the MAX of
	 this end position and the previously computed size.  */
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  else
    /* QUAL_UNION_TYPE variant: SIZE is a COND_EXPR selected by the
       discriminant.  Recurse into each arm, keeping the previous size
       for empty (zero-sized) variants.  */
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1089
1090 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1091 related by the addition of a constant. Return that constant if so. */
1092
1093 static tree
1094 compute_related_constant (tree op0, tree op1)
1095 {
1096 tree op0_var, op1_var;
1097 tree op0_con = split_plus (op0, &op0_var);
1098 tree op1_con = split_plus (op1, &op1_var);
1099 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1100
1101 if (operand_equal_p (op0_var, op1_var, 0))
1102 return result;
1103 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1104 return result;
1105 else
1106 return 0;
1107 }
1108
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery. We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  /* Default: treat the whole expression as variable with no constant
     part; the cases below override this when they can do better.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* Pure constant: the constant part is IN itself and there is no
	 variable part.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Split both operands recursively and recombine the variable and
	 constant pieces with the same PLUS/MINUS operation.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If neither operand yielded a constant part, keep the default
	 *PVAR set above (the whole of IN) and report a zero constant.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
1146 \f
1147 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1148 subprogram. If it is void_type_node, then we are dealing with a procedure,
1149 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1150 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1151 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1152 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1153 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1154 RETURNS_WITH_DSP is nonzero if the function is to return with a
1155 depressed stack pointer. */
1156
1157 tree
1158 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1159 bool returns_unconstrained, bool returns_by_ref,
1160 bool returns_with_dsp)
1161 {
1162 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1163 the subprogram formal parameters. This list is generated by traversing the
1164 input list of PARM_DECL nodes. */
1165 tree param_type_list = NULL;
1166 tree param_decl;
1167 tree type;
1168
1169 for (param_decl = param_decl_list; param_decl;
1170 param_decl = TREE_CHAIN (param_decl))
1171 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1172 param_type_list);
1173
1174 /* The list of the function parameter types has to be terminated by the void
1175 type to signal to the back-end that we are not dealing with a variable
1176 parameter subprogram, but that the subprogram has a fixed number of
1177 parameters. */
1178 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1179
1180 /* The list of argument types has been created in reverse
1181 so nreverse it. */
1182 param_type_list = nreverse (param_type_list);
1183
1184 type = build_function_type (return_type, param_type_list);
1185
1186 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1187 or the new type should, make a copy of TYPE. Likewise for
1188 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1189 if (TYPE_CI_CO_LIST (type) || cico_list
1190 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1191 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1192 type = copy_type (type);
1193
1194 SET_TYPE_CI_CO_LIST (type, cico_list);
1195 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1196 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1197 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1198 return type;
1199 }
1200 \f
1201 /* Return a copy of TYPE but safe to modify in any way. */
1202
1203 tree
1204 copy_type (tree type)
1205 {
1206 tree new = copy_node (type);
1207
1208 /* copy_node clears this field instead of copying it, because it is
1209 aliased with TREE_CHAIN. */
1210 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1211
1212 TYPE_POINTER_TO (new) = 0;
1213 TYPE_REFERENCE_TO (new) = 0;
1214 TYPE_MAIN_VARIANT (new) = new;
1215 TYPE_NEXT_VARIANT (new) = 0;
1216
1217 return new;
1218 }
1219 \f
1220 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1221 TYPE_INDEX_TYPE is INDEX. */
1222
1223 tree
1224 create_index_type (tree min, tree max, tree index)
1225 {
1226 /* First build a type for the desired range. */
1227 tree type = build_index_2_type (min, max);
1228
1229 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1230 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1231 is set, but not to INDEX, make a copy of this type with the requested
1232 index type. Note that we have no way of sharing these types, but that's
1233 only a small hole. */
1234 if (TYPE_INDEX_TYPE (type) == index)
1235 return type;
1236 else if (TYPE_INDEX_TYPE (type))
1237 type = copy_type (type);
1238
1239 SET_TYPE_INDEX_TYPE (type, index);
1240 create_type_decl (NULL_TREE, type, NULL, true, false, Empty);
1241 return type;
1242 }
1243 \f
1244 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1245 string) and TYPE is a ..._TYPE node giving its data type.
1246 ARTIFICIAL_P is true if this is a declaration that was generated
1247 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1248 information about this type. GNAT_NODE is used for the position of
1249 the decl. */
1250
1251 tree
1252 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1253 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1254 {
1255 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1256 enum tree_code code = TREE_CODE (type);
1257
1258 DECL_ARTIFICIAL (type_decl) = artificial_p;
1259
1260 process_attributes (type_decl, attr_list);
1261
1262 /* Pass type declaration information to the debugger unless this is an
1263 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1264 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1265 a dummy type, which will be completed later, or a type for which
1266 debugging information was not requested. */
1267 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1268 || !debug_info_p)
1269 DECL_IGNORED_P (type_decl) = 1;
1270 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1271 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1272 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1273 rest_of_decl_compilation (type_decl, global_bindings_p (), 0);
1274
1275 if (!TYPE_IS_DUMMY_P (type))
1276 gnat_pushdecl (type_decl, gnat_node);
1277
1278 return type_decl;
1279 }
1280
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant.

   PUBLIC_FLAG is true if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
		 bool const_flag, bool public_flag, bool extern_flag,
		 bool static_flag, struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is a compile-time constant: for a static or
     global variable it must be a valid static initializer; for an
     automatic one, TREE_CONSTANT suffices.  The main-variant comparison
     excludes initializers needing an implicit view conversion.  */
  bool init_const
    = (!var_init
       ? false
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small" */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.
     If we are defining a global here, leave a constant initialization and
     save any variable elaborations for the elaboration routine.  If we are
     just annotating types, throw away the initialization if it isn't a
     constant.  */
  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external. If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  /* A volatile object must live in memory, so force it addressable.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* CONST_DECLs have no storage to emit; everything else is handed to
     the back-end now.  */
  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, global_bindings_p (), 0);

  return var_decl;
}
1361 \f
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   int packed, tree size, tree pos, int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && !addressable
      && (!operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos
	      && !value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					   bitsize_int (TYPE_ALIGN
							(field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* With no packing and no explicit position, cap the field alignment
	 by the record's alignment if the record has one.  */
      if (!packed && !pos)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
  /* Bitfields need no more than bit alignment; packed non-BLKmode fields
     byte alignment; everything else the type's natural alignment.  */
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
1494
1495 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1496 effects, has the value of zero. */
1497
1498 static bool
1499 value_zerop (tree exp)
1500 {
1501 if (TREE_CODE (exp) == COMPOUND_EXPR)
1502 return value_zerop (TREE_OPERAND (exp, 1));
1503
1504 return integer_zerop (exp);
1505 }
1506 \f
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is true if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

tree
create_param_decl (tree param_name, tree param_type, bool readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor targetm.calls.promote_prototypes(), as not doing so can
     lead to various ABI violations.  Narrow integer and enumeration
     parameters are widened to integer_type_node for the call ABI.  */
  if (targetm.calls.promote_prototypes (param_type)
      && (TREE_CODE (param_type) == INTEGER_TYPE
	  || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  /* copy_type so that setting the biased flag below does not
	     clobber a possibly shared range type node.  */
	  param_type
	    = copy_type (build_range_type (integer_type_node,
					   TYPE_MIN_VALUE (param_type),
					   TYPE_MAX_VALUE (param_type)));

	  TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
	}
      else
	param_type = integer_type_node;
    }

  /* Record the (possibly promoted) type used to pass the argument.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
1545 \f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand a machine attribute to the generic attribute machinery.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias needs a definition, hence TREE_STATIC.  */
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	/* Weak symbols are only honored where the target supports them;
	   otherwise warn at the point of the declaration.  */
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	/* Place the decl in the named section when the target supports
	   named sections; otherwise warn.  */
	if (targetm.have_named_sections)
	  {
	    DECL_SECTION_NAME (decl)
	      = build_string (IDENTIFIER_LENGTH (attr_list->name),
			      IDENTIFIER_POINTER (attr_list->name));
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr_list->error_point);
	break;
      }
}
1586 \f
1587 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1588 a power of 2. */
1589
1590 static bool
1591 value_factor_p (tree value, HOST_WIDE_INT factor)
1592 {
1593 if (host_integerp (value, 1))
1594 return tree_low_cst (value, 1) % factor == 0;
1595
1596 if (TREE_CODE (value) == MULT_EXPR)
1597 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1598 || value_factor_p (TREE_OPERAND (value, 1), factor));
1599
1600 return 0;
1601 }
1602
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return False: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap */
  return true;
}
1648
1649 /* Returns a LABEL_DECL node for LABEL_NAME. */
1650
1651 tree
1652 create_label_decl (tree label_name)
1653 {
1654 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1655
1656 DECL_CONTEXT (label_decl) = current_function_decl;
1657 DECL_MODE (label_decl) = VOIDmode;
1658 DECL_SOURCE_LOCATION (label_decl) = input_location;
1659
1660 return label_decl;
1661 }
1662 \f
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
		     tree subprog_type, tree param_decl_list, bool inline_flag,
		     bool public_flag, bool extern_flag,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = true;

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  /* A subprogram definition always has a place to live.  */
  TREE_STATIC (subprog_decl) = 1;
  /* Propagate the const/volatile qualification of the function type
     (pure functions, volatile subprograms) onto the decl.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  /* Build the RESULT_DECL holding the function's return value; it is a
     compiler artifact and should not appear in debug output.  */
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
  DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
  DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;

  if (inline_flag)
    DECL_DECLARED_INLINE_P (subprog_decl) = 1;

  if (asm_name)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
1714 \f
1715 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1716 body. This routine needs to be invoked before processing the declarations
1717 appearing in the subprogram. */
1718
1719 void
1720 begin_subprog_body (tree subprog_decl)
1721 {
1722 tree param_decl;
1723
1724 current_function_decl = subprog_decl;
1725 announce_function (subprog_decl);
1726
1727 /* Enter a new binding level and show that all the parameters belong to
1728 this function. */
1729 gnat_pushlevel ();
1730 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1731 param_decl = TREE_CHAIN (param_decl))
1732 DECL_CONTEXT (param_decl) = subprog_decl;
1733
1734 make_decl_rtl (subprog_decl, NULL);
1735
1736 /* We handle pending sizes via the elaboration of types, so we don't need to
1737 save them. This causes them to be marked as part of the outer function
1738 and then discarded. */
1739 get_pending_sizes ();
1740 }
1741
1742 /* Finish the definition of the current subprogram and compile it all the way
1743 to assembler language output. BODY is the tree corresponding to
1744 the subprogram. */
1745
void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Deal with inline.  If declared inline or we should default to inline,
     set the flag in the decl.  */
  DECL_INLINE (fndecl)
    = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* Attach the completed body to the decl.  */
  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function (NULL at top level) as current and
     clear the per-function state.  */
  current_function_decl = DECL_CONTEXT (fndecl);
  cfun = NULL;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* We do different things for nested and non-nested functions.
     ??? This should be in cgraph.  */
  if (!DECL_CONTEXT (fndecl))
    {
      /* A non-nested function: gimplify it (and its nested functions),
	 lower the nested ones away, then hand everything to cgraph.  */
      gnat_gimplify_function (fndecl);
      lower_nested_functions (fndecl);
      gnat_finalize (fndecl);
    }
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
1792
1793 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
1794
1795 static void
1796 gnat_gimplify_function (tree fndecl)
1797 {
1798 struct cgraph_node *cgn;
1799
1800 dump_function (TDI_original, fndecl);
1801 gimplify_function_tree (fndecl);
1802 dump_function (TDI_generic, fndecl);
1803
1804 /* Convert all nested functions to GIMPLE now. We do things in this order
1805 so that items like VLA sizes are expanded properly in the context of the
1806 correct function. */
1807 cgn = cgraph_node (fndecl);
1808 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1809 gnat_gimplify_function (cgn->decl);
1810 }
1811
1812 /* Give FNDECL and all its nested functions to cgraph for compilation. */
1813
1814 static void
1815 gnat_finalize (tree fndecl)
1816 {
1817 struct cgraph_node *cgn;
1818
1819 /* Finalize all nested functions now. */
1820 cgn = cgraph_node (fndecl);
1821 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
1822 gnat_finalize (cgn->decl);
1823
1824 cgraph_finalize_function (fndecl, false);
1825 }
1826 \f
1827 /* Return a definition for a builtin function named NAME and whose data type
1828 is TYPE. TYPE should be a function type with argument types.
1829 FUNCTION_CODE tells later passes how to compile calls to this function.
1830 See tree.h for its possible values.
1831
1832 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1833 the name to be called if we can't opencode the function. If
1834 ATTRS is nonzero, use that for the function attribute list. */
1835
1836 tree
1837 builtin_function (const char *name, tree type, int function_code,
1838 enum built_in_class class, const char *library_name,
1839 tree attrs)
1840 {
1841 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1842
1843 DECL_EXTERNAL (decl) = 1;
1844 TREE_PUBLIC (decl) = 1;
1845 if (library_name)
1846 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1847
1848 gnat_pushdecl (decl, Empty);
1849 DECL_BUILT_IN_CLASS (decl) = class;
1850 DECL_FUNCTION_CODE (decl) = function_code;
1851 if (attrs)
1852 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1853 return decl;
1854 }
1855
1856 /* Return an integer type with the number of bits of precision given by
1857 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1858 it is a signed type. */
1859
1860 tree
1861 gnat_type_for_size (unsigned precision, int unsignedp)
1862 {
1863 tree t;
1864 char type_name[20];
1865
1866 if (precision <= 2 * MAX_BITS_PER_WORD
1867 && signed_and_unsigned_types[precision][unsignedp])
1868 return signed_and_unsigned_types[precision][unsignedp];
1869
1870 if (unsignedp)
1871 t = make_unsigned_type (precision);
1872 else
1873 t = make_signed_type (precision);
1874
1875 if (precision <= 2 * MAX_BITS_PER_WORD)
1876 signed_and_unsigned_types[precision][unsignedp] = t;
1877
1878 if (!TYPE_NAME (t))
1879 {
1880 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1881 TYPE_NAME (t) = get_identifier (type_name);
1882 }
1883
1884 return t;
1885 }
1886
1887 /* Likewise for floating-point types. */
1888
1889 static tree
1890 float_type_for_precision (int precision, enum machine_mode mode)
1891 {
1892 tree t;
1893 char type_name[20];
1894
1895 if (float_types[(int) mode])
1896 return float_types[(int) mode];
1897
1898 float_types[(int) mode] = t = make_node (REAL_TYPE);
1899 TYPE_PRECISION (t) = precision;
1900 layout_type (t);
1901
1902 if (TYPE_MODE (t) != mode)
1903 abort ();
1904
1905 if (!TYPE_NAME (t))
1906 {
1907 sprintf (type_name, "FLOAT_%d", precision);
1908 TYPE_NAME (t) = get_identifier (type_name);
1909 }
1910
1911 return t;
1912 }
1913
1914 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1915 an unsigned type; otherwise a signed type is returned. */
1916
1917 tree
1918 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1919 {
1920 if (mode == BLKmode)
1921 return NULL_TREE;
1922 else if (mode == VOIDmode)
1923 return void_type_node;
1924 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1925 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
1926 else
1927 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1928 }
1929
1930 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1931
1932 tree
1933 gnat_unsigned_type (tree type_node)
1934 {
1935 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1936
1937 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1938 {
1939 type = copy_node (type);
1940 TREE_TYPE (type) = type_node;
1941 }
1942 else if (TREE_TYPE (type_node)
1943 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1944 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1945 {
1946 type = copy_node (type);
1947 TREE_TYPE (type) = TREE_TYPE (type_node);
1948 }
1949
1950 return type;
1951 }
1952
1953 /* Return the signed version of a TYPE_NODE, a scalar type. */
1954
1955 tree
1956 gnat_signed_type (tree type_node)
1957 {
1958 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
1959
1960 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1961 {
1962 type = copy_node (type);
1963 TREE_TYPE (type) = type_node;
1964 }
1965 else if (TREE_TYPE (type_node)
1966 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1967 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1968 {
1969 type = copy_node (type);
1970 TREE_TYPE (type) = TREE_TYPE (type_node);
1971 }
1972
1973 return type;
1974 }
1975
1976 /* Return a type the same as TYPE except unsigned or signed according to
1977 UNSIGNEDP. */
1978
1979 tree
1980 gnat_signed_or_unsigned_type (int unsignedp, tree type)
1981 {
1982 if (!INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
1983 return type;
1984 else
1985 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
1986 }
1987 \f
1988 /* EXP is an expression for the size of an object. If this size contains
1989 discriminant references, replace them with the maximum (if MAX_P) or
1990 minimum (if !MAX_P) possible value of the discriminant. */
1991
tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants contain no discriminant references,
	 so return them unchanged.  */
      return exp;

    case 'x':
      /* For a TREE_LIST, process both the value and the rest of the
	 chain recursively.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp)
			  ? max_size (TREE_CHAIN (exp), max_p) : NULL_TREE);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* A discriminant reference: substitute the requested extreme of the
	 discriminant's type (operand 1 is the FIELD_DECL).  The chosen
	 bound is itself processed with MAX_P true.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case '<':
      /* A comparison yields zero or one; those are its extreme values.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation reverses which extreme of the operand we want.  */
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? !max_p : max_p)));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    /* For MINUS_EXPR the extreme of the RHS is the opposite of
	       the extreme we want overall.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? !max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && !TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* The extreme of a conditional is the extreme over both arms.  */
	    return fold (build (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1))
	    /* Rebuild the call with its argument list processed.  */
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p), NULL);
	}
    }

  /* Any other tree code is unexpected here.  */
  abort ();
}
2082 \f
2083 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2084 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2085 Return a constructor for the template. */
2086
tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip a padding or left-justified-modular wrapper to get at the
     underlying array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.   Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* The template has two fields (min, max) per dimension, so advance two
     fields per iteration; the bound source advances one step in parallel
     (bound list entry, or one level of nested array type).  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	abort ();

      /* FIELD is the min field; TREE_CHAIN (field) is the max field.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* Cons in reverse; the list is reversed again below.  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2145 \f
2146 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2147 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2148 in the type contains in its DECL_INITIAL the expression to use when
2149 a constructor is made for the type. GNAT_ENTITY is an entity used
2150 to print out an error message if the mechanism cannot be applied to
2151 an object of that type and also for the name. */
2152
tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* For Fortran-convention arrays (except NCA descriptors), collect the
     index types in reverse order; otherwise collect them outermost
     first.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TYPE_DOMAIN (inner_type) ? TREE_TYPE (inner_type) : TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  The numeric codes are the VMS descriptor
     DTYPE$ encodings — NOTE(review): values presumably match the OpenVMS
     calling standard; confirm against that document.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	/* VAX floating point is encoded by its decimal digit count.  */
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	/* Plain integers are encoded by bit size and signedness.  */
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* The POINTER field is a 32-bit (SImode) pointer to the object; the
     PLACEHOLDER_EXPR is filled in when a constructor is made.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("POINTER",
		build_pointer_type_for_mode (type, SImode, false), record_type,
		build1 (ADDR_EXPR,
			build_pointer_type_for_mode (type, SImode, false),
			build (PLACEHOLDER_EXPR, type))));

  /* Then add the mechanism-specific fields.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* String descriptor with bounds: lower and upper bound fields.  */
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node),
		     NULL_TREE, NULL_TREE);

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0",
		    build_pointer_type_for_mode (inner_type, SImode, false),
		    record_type,
		    build1 (ADDR_EXPR,
			    build_pointer_type_for_mode (inner_type, SImode,
							 false),
			    tem)));

      /* Next come the addressing coefficients.  Each is the running
	 product of the lengths of the dimensions processed so far.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  /* Field names are S0,S1,... for NCA and M0,M1,... otherwise.  */
	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds, Li (lower) and Ui (upper) for each
	 dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, false, true);
  create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
		    NULL, true, false, gnat_entity);

  return record_type;
}
2440
2441 /* Utility routine for above code to make a field. */
2442
2443 static tree
2444 make_descriptor_field (const char *name, tree type,
2445 tree rec_type, tree initial)
2446 {
2447 tree field
2448 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2449
2450 DECL_INITIAL (field) = initial;
2451 return field;
2452 }
2453 \f
2454 /* Build a type to be used to represent an aliased object whose nominal
2455 type is an unconstrained array. This consists of a RECORD_TYPE containing
2456 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2457 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2458 is used to represent an arbitrary unconstrained object. Use NAME
2459 as the name of the record. */
2460
2461 tree
2462 build_unc_object_type (tree template_type, tree object_type, tree name)
2463 {
2464 tree type = make_node (RECORD_TYPE);
2465 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2466 template_type, type, 0, 0, 0, 1);
2467 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2468 type, 0, 0, 0, 1);
2469
2470 TYPE_NAME (type) = name;
2471 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2472 finish_record_type (type,
2473 chainon (chainon (NULL_TREE, template_field),
2474 array_field),
2475 false, false);
2476
2477 return type;
2478 }
2479 \f
2480 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2481 the normal case this is just two adjustments, but we have more to do
2482 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2483
void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree ptr1, ref1;
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well. Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it. After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed. We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
     qualifiers in thoses cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case: retarget every pointer and
     reference variant at the new type.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
	     ptr1 = TYPE_NEXT_VARIANT (ptr1))
	  {
	    TREE_TYPE (ptr1) = new_type;

	    /* Re-emit debug info for named pointer types.  */
	    if (TYPE_NAME (ptr1)
		&& TREE_CODE (TYPE_NAME (ptr1)) == TYPE_DECL
		&& TREE_CODE (new_type) != ENUMERAL_TYPE)
	      rest_of_decl_compilation (TYPE_NAME (ptr1),
					global_bindings_p (), 0);
	  }

      for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
	for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
	     ref1 = TYPE_NEXT_VARIANT (ref1))
	  {
	    TREE_TYPE (ref1) = new_type;

	    if (TYPE_NAME (ref1)
		&& TREE_CODE (TYPE_NAME (ref1)) == TYPE_DECL
		&& TREE_CODE (new_type) != ENUMERAL_TYPE)
	      rest_of_decl_compilation (TYPE_NAME (ref1),
					global_bindings_p (), 0);
	  }
    }

  /* Now deal with the unconstrained array case. In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void. In that case, copy the field list from the
     old type to the new one and update the fields' context. */
  else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
    abort ();

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Link the old fat pointer's fields back to the new ones so that
	 they are recognized as the same fields.  */
      SET_DECL_ORIGINAL_FIELD (TYPE_FIELDS (ptr),
			       TYPE_FIELDS (TYPE_POINTER_TO (new_type)));
      SET_DECL_ORIGINAL_FIELD (TREE_CHAIN (TYPE_FIELDS (ptr)),
			       TREE_CHAIN (TYPE_FIELDS
					   (TYPE_POINTER_TO (new_type))));

      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)), NULL_TREE);

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* All variants of the fat pointer now designate the new
	 unconstrained array type.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record size is the sum of the two field sizes (template plus
	 array).  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2634 \f
2635 /* Convert a pointer to a constrained array into a pointer to a fat
2636 pointer. This involves making or finding a template. */
2637
static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record. */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Wrap EXPR in a SAVE_EXPR since it is used twice below; strip a
	 direct ADDR_EXPR, otherwise dereference the thin pointer.  */
      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The first field of the record is the template, the second the
	 array data.  */
      template = build_component_ref (expr, NULL_TREE, fields, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), false));
    }
  else
    /* Otherwise, build the constructor for the template. */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type. In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary. This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2700 \f
2701 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2702 is something that is a fat pointer, so convert to it first if it EXPR
2703 is not already a fat pointer. */
2704
2705 static tree
2706 convert_to_thin_pointer (tree type, tree expr)
2707 {
2708 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2709 expr
2710 = convert_to_fat_pointer
2711 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2712
2713 /* We get the pointer to the data and use a NOP_EXPR to make it the
2714 proper GCC type. */
2715 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
2716 false);
2717 expr = build1 (NOP_EXPR, type, expr);
2718
2719 return expr;
2720 }
2721 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))))
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr)
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type,
			build_component_ref (expr, NULL_TREE,
					     TYPE_FIELDS (etype), false));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, false);

      else
	return
	  gnat_build_constructor (type,
			     tree_cons (TYPE_FIELDS (type),
					convert (TREE_TYPE
						 (TYPE_FIELDS (type)),
						 expr),
					NULL_TREE));
    }

  /* If the input is a biased type, adjust first by adding the bias
     (TYPE_MIN_VALUE) back to recover the actual value.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	  && (TREE_CODE (expr) == STRING_CST
	      || get_alias_set (etype) == get_alias_set (type)))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, false));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Strip a nested view conversion between aggregates; the outer
	 conversion about to be made subsumes it.  */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we're converting between two aggregate types that have the same main
     variant, just make a VIEW_CONVERT_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
	   || (code == INTEGER_CST && ecode == INTEGER_CST
	       && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  /* Otherwise, dispatch on the target type.  */
  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case BOOLEAN_TYPE:
      return fold (build1 (NOP_EXPR, type, gnat_truthvalue_conversion (expr)));

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Subtract the bias so the stored representation fits TYPE.  */
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      abort ();

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	abort ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      abort ();
    }
}
3049 \f
3050 /* Remove all conversions that are done in EXP. This includes converting
3051 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3052 is true, always return the address of the containing object even if
3053 the address is not bit-aligned. */
3054
3055 tree
3056 remove_conversions (tree exp, bool true_address)
3057 {
3058 switch (TREE_CODE (exp))
3059 {
3060 case CONSTRUCTOR:
3061 if (true_address
3062 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3063 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3064 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), true);
3065 break;
3066
3067 case COMPONENT_REF:
3068 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3069 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3070 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3071 break;
3072
3073 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3074 case NOP_EXPR: case CONVERT_EXPR:
3075 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3076
3077 default:
3078 break;
3079 }
3080
3081 return exp;
3082 }
3083 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.
   Otherwise return EXP unchanged.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the P_ARRAY pointer found in the fat pointer that
	     the UNCONSTRAINED_ARRAY_REF operates on.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, false));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Propagate the NULL_EXPR, retyped to the array type found inside
	   the fat pointer's P_ARRAY field.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): there is no break here, so for any other expression
	 of unconstrained array type control falls through into the
	 RECORD_TYPE case below -- presumably its padding/template tests
	 then do not match and EXP is returned unchanged, but confirm this
	 fall-through is intentional.  */

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	/* The array is the second field, after the template.  */
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}
3139 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && !(TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && !(TREE_CODE (etype) == INTEGER_TYPE
		 && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* Strip a biased representation from the input by converting
	 through an unbiased copy of its type first.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Likewise convert to an unbiased copy of a biased target type,
	 then NOP to the real type below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size: wrap the input in a single-field record and convert that.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      expr = maybe_unconstrained_array (expr);

      /* There's no point in doing two unchecked conversions in a row.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	expr = TREE_OPERAND (expr, 0);

      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(TREE_CODE (type) == INTEGER_TYPE
	   && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && operand_equal_p (TYPE_RM_SIZE (type),
			       (TYPE_RM_SIZE (etype) != 0
				? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
			       0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Extend by shifting the value left then right by the number of
	 bits the machine mode has beyond the RM size; the right shift
	 (arithmetic or logical per TYPE's signedness) does the extension.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3303
3304 #include "gt-ada-utils.h"
3305 #include "gtype-ada.h"