a-exexpr.adb (Others_Value, [...]): New variables...
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41 #include "cgraph.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
45
46 #include "ada.h"
47 #include "types.h"
48 #include "atree.h"
49 #include "elists.h"
50 #include "namet.h"
51 #include "nlists.h"
52 #include "stringt.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
62 #endif
63
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 #endif
67
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node. It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
   of `save_gnu_tree' for more info.  Indexed by GNAT node id minus
   First_Node_Id; allocated (cleared) in init_gnat_to_gnu below.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once. Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct gnat_binding_level GTY((chain_next ("%h.chain")))
{
  /* The binding level containing this one (the enclosing binding level). */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* A chain of unused BLOCK nodes. */
static GTY((deletable)) tree free_block_chain;

/* Per-function language-specific state.  Gigi currently keeps nothing
   here; the struct exists because the GCC function machinery requires
   the type to be defined.  */
struct language_function GTY(())
{
  int unused;
};

/* Forward declarations for local subprograms defined later in this file.  */
static void gnat_define_builtin (const char *, tree, int, const char *, bool);
static void gnat_install_builtins (void);
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static bool value_zerop (tree);
static void gnat_gimplify_function (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *,tree, tree, tree);
static bool value_factor_p (tree, HOST_WIDE_INT);
static bool potential_alignment_gap (tree, tree, tree);
134 \f
135 /* Initialize the association of GNAT nodes to GCC trees. */
136
137 void
138 init_gnat_to_gnu (void)
139 {
140 associate_gnat_to_gnu
141 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
142 }
143
144 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
145 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
146 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
147
148 If GNU_DECL is zero, a previous association is to be reset. */
149
150 void
151 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
152 {
153 /* Check that GNAT_ENTITY is not already defined and that it is being set
154 to something which is a decl. Raise gigi 401 if not. Usually, this
155 means GNAT_ENTITY is defined twice, but occasionally is due to some
156 Gigi problem. */
157 gcc_assert (!gnu_decl
158 || (!associate_gnat_to_gnu[gnat_entity - First_Node_Id]
159 && (no_check || DECL_P (gnu_decl))));
160 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
161 }
162
163 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
164 Return the ..._DECL node that was associated with it. If there is no tree
165 node associated with GNAT_ENTITY, abort.
166
167 In some cases, such as delayed elaboration or expressions that need to
168 be elaborated only once, GNAT_ENTITY is really not an entity. */
169
170 tree
171 get_gnu_tree (Entity_Id gnat_entity)
172 {
173 gcc_assert (associate_gnat_to_gnu[gnat_entity - First_Node_Id]);
174 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
175 }
176
177 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
178
179 bool
180 present_gnu_tree (Entity_Id gnat_entity)
181 {
182 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id]) != 0;
183 }
184
185 \f
186 /* Return non-zero if we are currently in the global binding level. */
187
188 int
189 global_bindings_p (void)
190 {
191 return ((force_global || !current_function_decl) ? -1 : 0);
192 }
193
194 /* Enter a new binding level. */
195
196 void
197 gnat_pushlevel ()
198 {
199 struct gnat_binding_level *newlevel = NULL;
200
201 /* Reuse a struct for this binding level, if there is one. */
202 if (free_binding_level)
203 {
204 newlevel = free_binding_level;
205 free_binding_level = free_binding_level->chain;
206 }
207 else
208 newlevel
209 = (struct gnat_binding_level *)
210 ggc_alloc (sizeof (struct gnat_binding_level));
211
212 /* Use a free BLOCK, if any; otherwise, allocate one. */
213 if (free_block_chain)
214 {
215 newlevel->block = free_block_chain;
216 free_block_chain = TREE_CHAIN (free_block_chain);
217 TREE_CHAIN (newlevel->block) = NULL_TREE;
218 }
219 else
220 newlevel->block = make_node (BLOCK);
221
222 /* Point the BLOCK we just made to its parent. */
223 if (current_binding_level)
224 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
225
226 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
227 TREE_USED (newlevel->block) = 1;
228
229 /* Add this level to the front of the chain (stack) of levels that are
230 active. */
231 newlevel->chain = current_binding_level;
232 newlevel->jmpbuf_decl = NULL_TREE;
233 current_binding_level = newlevel;
234 }
235
236 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
237 and point FNDECL to this BLOCK. */
238
239 void
240 set_current_block_context (tree fndecl)
241 {
242 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
243 DECL_INITIAL (fndecl) = current_binding_level->block;
244 }
245
246 /* Set the jmpbuf_decl for the current binding level to DECL. */
247
248 void
249 set_block_jmpbuf_decl (tree decl)
250 {
251 current_binding_level->jmpbuf_decl = decl;
252 }
253
254 /* Get the jmpbuf_decl, if any, for the current binding level. */
255
256 tree
257 get_block_jmpbuf_decl ()
258 {
259 return current_binding_level->jmpbuf_decl;
260 }
261
262 /* Exit a binding level. Set any BLOCK into the current code group. */
263
264 void
265 gnat_poplevel ()
266 {
267 struct gnat_binding_level *level = current_binding_level;
268 tree block = level->block;
269
270 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
271 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
272
273 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
274 are no variables free the block and merge its subblocks into those of its
275 parent block. Otherwise, add it to the list of its parent. */
276 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
277 ;
278 else if (BLOCK_VARS (block) == NULL_TREE)
279 {
280 BLOCK_SUBBLOCKS (level->chain->block)
281 = chainon (BLOCK_SUBBLOCKS (block),
282 BLOCK_SUBBLOCKS (level->chain->block));
283 TREE_CHAIN (block) = free_block_chain;
284 free_block_chain = block;
285 }
286 else
287 {
288 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
289 BLOCK_SUBBLOCKS (level->chain->block) = block;
290 TREE_USED (block) = 1;
291 set_block_for_group (block);
292 }
293
294 /* Free this binding structure. */
295 current_binding_level = level->chain;
296 level->chain = free_binding_level;
297 free_binding_level = level;
298 }
299
300 /* Insert BLOCK at the end of the list of subblocks of the
301 current binding level. This is used when a BIND_EXPR is expanded,
302 to handle the BLOCK node inside the BIND_EXPR. */
303
304 void
305 insert_block (tree block)
306 {
307 TREE_USED (block) = 1;
308 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
309 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
310 }
311 \f
/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information.  DECL is given a context,
   a source location, chained onto the current BLOCK's variable list
   (except at global level), possibly used to name its type, and finally
   handed to the back end via rest_of_decl_compilation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If at top level, there is no context. But PARM_DECLs always go in the
     level of its function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    DECL_CONTEXT (decl) = 0;
  else
    DECL_CONTEXT (decl) = current_function_decl;

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order. The list will be reversed later.  We don't do this for global
     variables.  Also, don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
     the list.  They will cause trouble with the debugger and aren't needed
     anyway.  */
  if (!global_bindings_p ()
      && (TREE_CODE (decl) != TYPE_DECL
	  || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE))
    {
      TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
      BLOCK_VARS (current_binding_level->block) = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl)
      && (!TYPE_NAME (TREE_TYPE (decl))
	  || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
	  || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
	      && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
	      && !DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  /* NOTE(review): CONST_DECLs are deliberately not sent to the back end
     here — presumably they need no code or debug output; confirm against
     the middle-end's handling of CONST_DECL.  */
  if (TREE_CODE (decl) != CONST_DECL)
    rest_of_decl_compilation (decl, global_bindings_p (), 0);
}
364 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  Creates the global binding level, the common
   tree nodes, sizetype, TYPE_DECLs for a few basic types, and the
   builtins the middle-end expects.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  gnat_pushlevel ();

  build_common_tree_nodes (true, true);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
  set_sizetype (size_type_node);
  build_common_tree_nodes_2 (0);

  /* Give names and make TYPE_DECLs for common types.  Note that
     char_type_node is named "unsigned char" here — presumably because
     Ada's Character type is unsigned; confirm against the GNAT type
     model before relying on this.  */
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype),
		 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			     integer_type_node),
		 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			     char_type_node),
		 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("long integer"),
			     long_integer_type_node),
		 Empty);

  ptr_void_type_node = build_pointer_type (void_type_node);

  gnat_install_builtins ();
}
407
408 /* Define a builtin function. This is temporary and is just being done
409 to initialize *_built_in_decls for the middle-end. We'll want
410 to do full builtin processing soon. */
411
412 static void
413 gnat_define_builtin (const char *name, tree type,
414 int function_code, const char *library_name, bool const_p)
415 {
416 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
417
418 DECL_EXTERNAL (decl) = 1;
419 TREE_PUBLIC (decl) = 1;
420 if (library_name)
421 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
422 make_decl_rtl (decl);
423 gnat_pushdecl (decl, Empty);
424 DECL_BUILT_IN_CLASS (decl) = BUILT_IN_NORMAL;
425 DECL_FUNCTION_CODE (decl) = function_code;
426 TREE_READONLY (decl) = const_p;
427
428 implicit_built_in_decls[function_code] = decl;
429 built_in_decls[function_code] = decl;
430 }
431
432 /* Install the builtin functions the middle-end needs. */
433
434 static void
435 gnat_install_builtins ()
436 {
437 tree ftype;
438 tree tmp;
439
440 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
441 tmp = tree_cons (NULL_TREE, long_integer_type_node, tmp);
442 ftype = build_function_type (long_integer_type_node, tmp);
443 gnat_define_builtin ("__builtin_expect", ftype, BUILT_IN_EXPECT,
444 "__builtin_expect", true);
445
446 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
447 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
448 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
449 ftype = build_function_type (ptr_void_type_node, tmp);
450 gnat_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
451 "memcpy", false);
452
453 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
454 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
455 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
456 ftype = build_function_type (integer_type_node, tmp);
457 gnat_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
458 "memcmp", false);
459
460 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
461 tmp = tree_cons (NULL_TREE, integer_type_node, tmp);
462 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
463 ftype = build_function_type (integer_type_node, tmp);
464 gnat_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
465 "memset", false);
466
467 tmp = tree_cons (NULL_TREE, integer_type_node, void_list_node);
468 ftype = build_function_type (integer_type_node, tmp);
469 gnat_define_builtin ("__builtin_clz", ftype, BUILT_IN_CLZ, "clz", true);
470
471 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
472 ftype = build_function_type (integer_type_node, tmp);
473 gnat_define_builtin ("__builtin_clzl", ftype, BUILT_IN_CLZL, "clzl", true);
474
475 tmp = tree_cons (NULL_TREE, long_long_integer_type_node, void_list_node);
476 ftype = build_function_type (integer_type_node, tmp);
477 gnat_define_builtin ("__builtin_clzll", ftype, BUILT_IN_CLZLL, "clzll",
478 true);
479
480 /* The init_trampoline and adjust_trampoline builtins aren't used directly.
481 They are inserted during lowering of nested functions. */
482
483 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
484 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
485 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
486 ftype = build_function_type (void_type_node, tmp);
487 gnat_define_builtin ("__builtin_init_trampoline", ftype,
488 BUILT_IN_INIT_TRAMPOLINE, "init_trampoline", false);
489
490 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
491 ftype = build_function_type (ptr_void_type_node, tmp);
492 gnat_define_builtin ("__builtin_adjust_trampoline", ftype,
493 BUILT_IN_ADJUST_TRAMPOLINE, "adjust_trampoline", true);
494
495 /* The stack_save, stack_restore, and alloca builtins aren't used directly.
496 They are inserted during gimplification to implement variable sized stack
497 allocation. */
498
499 ftype = build_function_type (ptr_void_type_node, void_list_node);
500 gnat_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
501 "stack_save", false);
502
503 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
504 ftype = build_function_type (void_type_node, tmp);
505 gnat_define_builtin ("__builtin_stack_restore", ftype,
506 BUILT_IN_STACK_RESTORE, "stack_restore", false);
507
508 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
509 ftype = build_function_type (ptr_void_type_node, tmp);
510 gnat_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
511 "alloca", false);
512 }
513
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.

   LONG_LONG_FLOAT_TYPE is the front end's longest float type (or, for
   VAX float, an integer type used as a placeholder — see below);
   EXCEPTION_TYPE is the front end's exception type.  Also declares the
   runtime support routines (allocation, exception handling, setjmp)
   that Gigi-generated code calls.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      create_type_decl (get_identifier ("longest float type"),
			longest_float_type_node, NULL, false, true, Empty);
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
		    NULL, false, true, Empty);

  void_type_decl_node = create_type_decl (get_identifier ("void"),
					  void_type_node, NULL, false, true,
					  Empty);

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions. */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, false, true, true, NULL,
				     Empty);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, false, true, true, NULL, Empty);

  /* Make the types and functions used for exception processing.  The
     jmpbuf is an array of 6 Pmode-sized slots (index type 0..5).  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_cst (NULL_TREE, 5)));
  create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
		    false, true, Empty);
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
     NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
     NULL_TREE, false, true, true, NULL, Empty);

  set_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
     NULL_TREE,
     build_function_type (void_type_node,
			  tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
     NULL_TREE, false, true, true, NULL, Empty);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_gnat_exception"),
     NULL_TREE,
     build_function_type (build_pointer_type (except_type_node), NULL_TREE),
     NULL_TREE, false, true, true, NULL, Empty);

  /* Functions that raise exceptions. */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  /* Dummy objects to materialize "others" and "all others" in the exception
     tables.  These are exported by a-exexpr.adb, so see this unit for the
     types to use.  */

  others_decl
    = create_var_decl (get_identifier ("OTHERS"),
		       get_identifier ("__gnat_others_value"),
		       integer_type_node, 0, 1, 0, 1, 1, 0, Empty);

  all_others_decl
    = create_var_decl (get_identifier ("ALL_OTHERS"),
		       get_identifier ("__gnat_all_others_value"),
		       integer_type_node, 0, 1, 0, 1, 1, 0, Empty);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, false, true, true, NULL, Empty);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, false, true, true, NULL, Empty);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, false, true, true, NULL, Empty);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  The buffer
       holds "__gnat_rcheck_" (14 chars) + 2 digits + NUL = 17.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, false, true, true, NULL, Empty);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE,  jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
     address.  */
  update_setjmp_buf_decl
    = create_subprog_decl
      (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, false, true, true, NULL, Empty);

  DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;

  main_identifier_node = get_identifier ("main");
}
728 \f
729 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL nodes
730 (FIELDLIST), finish constructing the record or union type. If HAS_REP is
731 true, this record has a rep clause; don't call layout_type but merely set
732 the size and alignment ourselves. If DEFER_DEBUG is true, do not call
733 the debugging routines on this type; it will be done later. */
734
735 void
736 finish_record_type (tree record_type, tree fieldlist, bool has_rep,
737 bool defer_debug)
738 {
739 enum tree_code code = TREE_CODE (record_type);
740 tree ada_size = bitsize_zero_node;
741 tree size = bitsize_zero_node;
742 bool var_size = false;
743 tree field;
744
745 TYPE_FIELDS (record_type) = fieldlist;
746 TYPE_STUB_DECL (record_type)
747 = build_decl (TYPE_DECL, NULL_TREE, record_type);
748
749 /* We don't need both the typedef name and the record name output in
750 the debugging information, since they are the same. */
751 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
752
753 /* Globally initialize the record first. If this is a rep'ed record,
754 that just means some initializations; otherwise, layout the record. */
755
756 if (has_rep)
757 {
758 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
759 TYPE_MODE (record_type) = BLKmode;
760 if (!TYPE_SIZE (record_type))
761 {
762 TYPE_SIZE (record_type) = bitsize_zero_node;
763 TYPE_SIZE_UNIT (record_type) = size_zero_node;
764 }
765 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
766 out just like a UNION_TYPE, since the size will be fixed. */
767 else if (code == QUAL_UNION_TYPE)
768 code = UNION_TYPE;
769 }
770 else
771 {
772 /* Ensure there isn't a size already set. There can be in an error
773 case where there is a rep clause but all fields have errors and
774 no longer have a position. */
775 TYPE_SIZE (record_type) = 0;
776 layout_type (record_type);
777 }
778
779 /* At this point, the position and size of each field is known. It was
780 either set before entry by a rep clause, or by laying out the type above.
781
782 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
783 to compute the Ada size; the GCC size and alignment (for rep'ed records
784 that are not padding types); and the mode (for rep'ed records). We also
785 clear the DECL_BIT_FIELD indication for the cases we know have not been
786 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
787
788 if (code == QUAL_UNION_TYPE)
789 fieldlist = nreverse (fieldlist);
790
791 for (field = fieldlist; field; field = TREE_CHAIN (field))
792 {
793 tree pos = bit_position (field);
794
795 tree type = TREE_TYPE (field);
796 tree this_size = DECL_SIZE (field);
797 tree this_ada_size = DECL_SIZE (field);
798
799 /* We need to make an XVE/XVU record if any field has variable size,
800 whether or not the record does. For example, if we have an union,
801 it may be that all fields, rounded up to the alignment, have the
802 same size, in which case we'll use that size. But the debug
803 output routines (except Dwarf2) won't be able to output the fields,
804 so we need to make the special record. */
805 if (TREE_CODE (this_size) != INTEGER_CST)
806 var_size = true;
807
808 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
809 || TREE_CODE (type) == QUAL_UNION_TYPE)
810 && !TYPE_IS_FAT_POINTER_P (type)
811 && !TYPE_CONTAINS_TEMPLATE_P (type)
812 && TYPE_ADA_SIZE (type))
813 this_ada_size = TYPE_ADA_SIZE (type);
814
815 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
816 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
817 && value_factor_p (pos, BITS_PER_UNIT)
818 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
819 DECL_BIT_FIELD (field) = 0;
820
821 /* If we still have DECL_BIT_FIELD set at this point, we know the field
822 is technically not addressable. Except that it can actually be
823 addressed if the field is BLKmode and happens to be properly
824 aligned. */
825 DECL_NONADDRESSABLE_P (field)
826 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
827
828 if (has_rep && !DECL_BIT_FIELD (field))
829 TYPE_ALIGN (record_type)
830 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
831
832 switch (code)
833 {
834 case UNION_TYPE:
835 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
836 size = size_binop (MAX_EXPR, size, this_size);
837 break;
838
839 case QUAL_UNION_TYPE:
840 ada_size
841 = fold (build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
842 this_ada_size, ada_size));
843 size = fold (build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
844 this_size, size));
845 break;
846
847 case RECORD_TYPE:
848 /* Since we know here that all fields are sorted in order of
849 increasing bit position, the size of the record is one
850 higher than the ending bit of the last field processed
851 unless we have a rep clause, since in that case we might
852 have a field outside a QUAL_UNION_TYPE that has a higher ending
853 position. So use a MAX in that case. Also, if this field is a
854 QUAL_UNION_TYPE, we need to take into account the previous size in
855 the case of empty variants. */
856 ada_size
857 = merge_sizes (ada_size, pos, this_ada_size,
858 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
859 size = merge_sizes (size, pos, this_size,
860 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
861 break;
862
863 default:
864 gcc_unreachable ();
865 }
866 }
867
868 if (code == QUAL_UNION_TYPE)
869 nreverse (fieldlist);
870
871 /* If this is a padding record, we never want to make the size smaller than
872 what was specified in it, if any. */
873 if (TREE_CODE (record_type) == RECORD_TYPE
874 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
875 size = TYPE_SIZE (record_type);
876
877 /* Now set any of the values we've just computed that apply. */
878 if (!TYPE_IS_FAT_POINTER_P (record_type)
879 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
880 SET_TYPE_ADA_SIZE (record_type, ada_size);
881
882 if (has_rep)
883 {
884 if (!(TREE_CODE (record_type) == RECORD_TYPE
885 && TYPE_IS_PADDING_P (record_type)
886 && CONTAINS_PLACEHOLDER_P (size)))
887 {
888 tree size_unit
889 = convert (sizetype, size_binop (CEIL_DIV_EXPR, size,
890 bitsize_unit_node));
891 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
892 TYPE_SIZE_UNIT (record_type)
893 = round_up (size_unit,
894 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
895 }
896
897 compute_record_mode (record_type);
898 }
899
900 if (!defer_debug)
901 {
902 /* If this record is of variable size, rename it so that the
903 debugger knows it is and make a new, parallel, record
904 that tells the debugger how the record is laid out. See
905 exp_dbug.ads. But don't do this for records that are padding
906 since they confuse GDB. */
907 if (var_size
908 && !(TREE_CODE (record_type) == RECORD_TYPE
909 && TYPE_IS_PADDING_P (record_type)))
910 {
911 tree new_record_type
912 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
913 ? UNION_TYPE : TREE_CODE (record_type));
914 tree orig_name = TYPE_NAME (record_type);
915 tree orig_id
916 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
917 : orig_name);
918 tree new_id
919 = concat_id_with_name (orig_id,
920 TREE_CODE (record_type) == QUAL_UNION_TYPE
921 ? "XVU" : "XVE");
922 tree last_pos = bitsize_zero_node;
923 tree old_field;
924 tree prev_old_field = 0;
925
926 TYPE_NAME (new_record_type) = new_id;
927 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
928 TYPE_STUB_DECL (new_record_type)
929 = build_decl (TYPE_DECL, NULL_TREE, new_record_type);
930 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
931 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
932 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
933 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
934 TYPE_SIZE_UNIT (new_record_type)
935 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
936
937 /* Now scan all the fields, replacing each field with a new
938 field corresponding to the new encoding. */
939 for (old_field = TYPE_FIELDS (record_type); old_field;
940 old_field = TREE_CHAIN (old_field))
941 {
942 tree field_type = TREE_TYPE (old_field);
943 tree field_name = DECL_NAME (old_field);
944 tree new_field;
945 tree curpos = bit_position (old_field);
946 bool var = false;
947 unsigned int align = 0;
948 tree pos;
949
950 /* See how the position was modified from the last position.
951
952 There are two basic cases we support: a value was added
953 to the last position or the last position was rounded to
954 a boundary and they something was added. Check for the
955 first case first. If not, see if there is any evidence
956 of rounding. If so, round the last position and try
957 again.
958
959 If this is a union, the position can be taken as zero. */
960
961 if (TREE_CODE (new_record_type) == UNION_TYPE)
962 pos = bitsize_zero_node, align = 0;
963 else
964 pos = compute_related_constant (curpos, last_pos);
965
966 if (!pos && TREE_CODE (curpos) == MULT_EXPR
967 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
968 {
969 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
970 pos = compute_related_constant (curpos,
971 round_up (last_pos, align));
972 }
973 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
974 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
975 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
976 && host_integerp (TREE_OPERAND
977 (TREE_OPERAND (curpos, 0), 1),
978 1))
979 {
980 align
981 = tree_low_cst
982 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
983 pos = compute_related_constant (curpos,
984 round_up (last_pos, align));
985 }
986 else if (potential_alignment_gap (prev_old_field, old_field,
987 pos))
988 {
989 align = TYPE_ALIGN (field_type);
990 pos = compute_related_constant (curpos,
991 round_up (last_pos, align));
992 }
993
994 /* If we can't compute a position, set it to zero.
995
996 ??? We really should abort here, but it's too much work
997 to get this correct for all cases. */
998
999 if (!pos)
1000 pos = bitsize_zero_node;
1001
1002 /* See if this type is variable-size and make a new type
1003 and indicate the indirection if so. */
1004 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1005 {
1006 field_type = build_pointer_type (field_type);
1007 var = true;
1008 }
1009
1010 /* Make a new field name, if necessary. */
1011 if (var || align != 0)
1012 {
1013 char suffix[6];
1014
1015 if (align != 0)
1016 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1017 align / BITS_PER_UNIT);
1018 else
1019 strcpy (suffix, "XVL");
1020
1021 field_name = concat_id_with_name (field_name, suffix);
1022 }
1023
1024 new_field = create_field_decl (field_name, field_type,
1025 new_record_type, 0,
1026 DECL_SIZE (old_field), pos, 0);
1027 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1028 TYPE_FIELDS (new_record_type) = new_field;
1029
1030 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1031 zero. The only time it's not the last field of the record
1032 is when there are other components at fixed positions after
1033 it (meaning there was a rep clause for every field) and we
1034 want to be able to encode them. */
1035 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1036 (TREE_CODE (TREE_TYPE (old_field))
1037 == QUAL_UNION_TYPE)
1038 ? bitsize_zero_node
1039 : DECL_SIZE (old_field));
1040 prev_old_field = old_field;
1041 }
1042
1043 TYPE_FIELDS (new_record_type)
1044 = nreverse (TYPE_FIELDS (new_record_type));
1045
1046 rest_of_type_compilation (new_record_type, global_bindings_p ());
1047 }
1048
1049 rest_of_type_compilation (record_type, global_bindings_p ());
1050 }
1051 }
1052
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is true, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      /* Ordinary field: its end position is FIRST_BIT + SIZE.  With a rep
	 clause fields need not be in layout order, so keep the MAX with
	 the size accumulated so far.  */
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  else
    /* Variant part: recurse into each arm of the COND_EXPR, mapping a
       zero size (an empty variant) back to the previous size.  */
    new = fold (build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
			integer_zerop (TREE_OPERAND (size, 1))
			? last_size : merge_sizes (last_size, first_bit,
						   TREE_OPERAND (size, 1),
						   1, has_rep),
			integer_zerop (TREE_OPERAND (size, 2))
			? last_size : merge_sizes (last_size, first_bit,
						   TREE_OPERAND (size, 2),
						   1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1095
1096 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1097 related by the addition of a constant. Return that constant if so. */
1098
1099 static tree
1100 compute_related_constant (tree op0, tree op1)
1101 {
1102 tree op0_var, op1_var;
1103 tree op0_con = split_plus (op0, &op0_var);
1104 tree op1_con = split_plus (op1, &op1_var);
1105 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1106
1107 if (operand_equal_p (op0_var, op1_var, 0))
1108 return result;
1109 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1110 return result;
1111 else
1112 return 0;
1113 }
1114
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  /* Default: treat the whole expression as the variable part; the
     fallbacks below then return a zero constant part.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* Pure constant: there is no variable part at all.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Split both operands recursively, then recombine the variable and
	 constant halves with the original PLUS/MINUS operation.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If neither operand produced a constant part, keep *PVAR as set
	 above and report a zero constant.  NOTE(review): this relies on
	 the recursive calls handing back the operand nodes themselves
	 (pointer identity) when nothing was split — confirm convert is
	 an identity on operands already of bitsizetype.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
1152 \f
1153 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1154 subprogram. If it is void_type_node, then we are dealing with a procedure,
1155 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1156 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1157 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1158 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1159 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1160 RETURNS_WITH_DSP is nonzero if the function is to return with a
1161 depressed stack pointer. RETURNS_BY_TARGET_PTR is true if the function
1162 is to be passed (as its first parameter) the address of the place to copy
1163 its result. */
1164
1165 tree
1166 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1167 bool returns_unconstrained, bool returns_by_ref,
1168 bool returns_with_dsp, bool returns_by_target_ptr)
1169 {
1170 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1171 the subprogram formal parameters. This list is generated by traversing the
1172 input list of PARM_DECL nodes. */
1173 tree param_type_list = NULL;
1174 tree param_decl;
1175 tree type;
1176
1177 for (param_decl = param_decl_list; param_decl;
1178 param_decl = TREE_CHAIN (param_decl))
1179 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1180 param_type_list);
1181
1182 /* The list of the function parameter types has to be terminated by the void
1183 type to signal to the back-end that we are not dealing with a variable
1184 parameter subprogram, but that the subprogram has a fixed number of
1185 parameters. */
1186 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1187
1188 /* The list of argument types has been created in reverse
1189 so nreverse it. */
1190 param_type_list = nreverse (param_type_list);
1191
1192 type = build_function_type (return_type, param_type_list);
1193
1194 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1195 or the new type should, make a copy of TYPE. Likewise for
1196 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1197 if (TYPE_CI_CO_LIST (type) || cico_list
1198 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1199 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1200 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1201 type = copy_type (type);
1202
1203 TYPE_CI_CO_LIST (type) = cico_list;
1204 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1205 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1206 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1207 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1208 return type;
1209 }
1210 \f
1211 /* Return a copy of TYPE but safe to modify in any way. */
1212
1213 tree
1214 copy_type (tree type)
1215 {
1216 tree new = copy_node (type);
1217
1218 /* copy_node clears this field instead of copying it, because it is
1219 aliased with TREE_CHAIN. */
1220 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1221
1222 TYPE_POINTER_TO (new) = 0;
1223 TYPE_REFERENCE_TO (new) = 0;
1224 TYPE_MAIN_VARIANT (new) = new;
1225 TYPE_NEXT_VARIANT (new) = 0;
1226
1227 return new;
1228 }
1229 \f
1230 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1231 TYPE_INDEX_TYPE is INDEX. */
1232
1233 tree
1234 create_index_type (tree min, tree max, tree index)
1235 {
1236 /* First build a type for the desired range. */
1237 tree type = build_index_2_type (min, max);
1238
1239 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1240 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1241 is set, but not to INDEX, make a copy of this type with the requested
1242 index type. Note that we have no way of sharing these types, but that's
1243 only a small hole. */
1244 if (TYPE_INDEX_TYPE (type) == index)
1245 return type;
1246 else if (TYPE_INDEX_TYPE (type))
1247 type = copy_type (type);
1248
1249 SET_TYPE_INDEX_TYPE (type, index);
1250 create_type_decl (NULL_TREE, type, NULL, true, false, Empty);
1251 return type;
1252 }
1253 \f
1254 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1255 string) and TYPE is a ..._TYPE node giving its data type.
1256 ARTIFICIAL_P is true if this is a declaration that was generated
1257 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1258 information about this type. GNAT_NODE is used for the position of
1259 the decl. */
1260
1261 tree
1262 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1263 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1264 {
1265 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1266 enum tree_code code = TREE_CODE (type);
1267
1268 DECL_ARTIFICIAL (type_decl) = artificial_p;
1269
1270 process_attributes (type_decl, attr_list);
1271
1272 /* Pass type declaration information to the debugger unless this is an
1273 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1274 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1275 a dummy type, which will be completed later, or a type for which
1276 debugging information was not requested. */
1277 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1278 || !debug_info_p)
1279 DECL_IGNORED_P (type_decl) = 1;
1280 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1281 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1282 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1283 rest_of_decl_compilation (type_decl, global_bindings_p (), 0);
1284
1285 if (!TYPE_IS_DUMMY_P (type))
1286 gnat_pushdecl (type_decl, gnat_node);
1287
1288 return type_decl;
1289 }
1290
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant.

   PUBLIC_FLAG is true if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is true
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
		 bool const_flag, bool public_flag, bool extern_flag,
		 bool static_flag, struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is a usable constant: the types must agree,
     and at global (or static) level it must pass the static-initializer
     check; otherwise TREE_CONSTANT suffices.  */
  bool init_const
    = (!var_init
       ? false
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small".  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.
     If we are defining a global here, leave a constant initialization and
     save any variable elaborations for the elaboration routine.  If we are
     just annotating types, throw away the initialization if it isn't a
     constant.  */
  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  /* A volatile type makes every access a side effect.  */
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* If it's public and not external, always allocate storage for it.
     At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl)
    = public_flag || (global_bindings_p () ? !extern_flag : static_flag);

  if (asm_name)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  /* A volatile object is assumed to be aliased by something outside our
     view, so its address must be valid.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, global_bindings_p (), 0);

  return var_decl;
}
1373 \f
/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is
   its type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   int packed, tree size, tree pos, int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && !addressable
      && (!operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos
	      && !value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					   bitsize_int (TYPE_ALIGN
							(field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* Without an explicit position or packing, cap the field's
	 alignment to the record's alignment when one is set.  */
      if (!packed && !pos)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
  /* Pick the strongest alignment requirement established so far: bit
     fields need only bit alignment, packed non-BLKmode fields byte
     alignment, and everything else the type's own alignment.  */
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
1506
1507 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1508 effects, has the value of zero. */
1509
1510 static bool
1511 value_zerop (tree exp)
1512 {
1513 if (TREE_CODE (exp) == COMPOUND_EXPR)
1514 return value_zerop (TREE_OPERAND (exp, 1));
1515
1516 return integer_zerop (exp);
1517 }
1518 \f
1519 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1520 PARAM_TYPE is its type. READONLY is true if the parameter is
1521 readonly (either an IN parameter or an address of a pass-by-ref
1522 parameter). */
1523
1524 tree
1525 create_param_decl (tree param_name, tree param_type, bool readonly)
1526 {
1527 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1528
1529 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1530 lead to various ABI violations. */
1531 if (targetm.calls.promote_prototypes (param_type)
1532 && (TREE_CODE (param_type) == INTEGER_TYPE
1533 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1534 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1535 {
1536 /* We have to be careful about biased types here. Make a subtype
1537 of integer_type_node with the proper biasing. */
1538 if (TREE_CODE (param_type) == INTEGER_TYPE
1539 && TYPE_BIASED_REPRESENTATION_P (param_type))
1540 {
1541 param_type
1542 = copy_type (build_range_type (integer_type_node,
1543 TYPE_MIN_VALUE (param_type),
1544 TYPE_MAX_VALUE (param_type)));
1545
1546 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1547 }
1548 else
1549 param_type = integer_type_node;
1550 }
1551
1552 DECL_ARG_TYPE (param_decl) = param_type;
1553 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1554 TREE_READONLY (param_decl) = readonly;
1555 return param_decl;
1556 }
1557 \f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand the attribute over to the generic attribute machinery.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* An alias needs storage, hence TREE_STATIC.  */
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm.have_named_sections)
	  {
	    DECL_SECTION_NAME (decl)
	      = build_string (IDENTIFIER_LENGTH (attr_list->name),
			      IDENTIFIER_POINTER (attr_list->name));
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr_list->error_point);
	break;
      }
}
1598 \f
1599 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1600 a power of 2. */
1601
1602 static bool
1603 value_factor_p (tree value, HOST_WIDE_INT factor)
1604 {
1605 if (host_integerp (value, 1))
1606 return tree_low_cst (value, 1) % factor == 0;
1607
1608 if (TREE_CODE (value) == MULT_EXPR)
1609 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1610 || value_factor_p (TREE_OPERAND (value, 1), factor));
1611
1612 return 0;
1613 }
1614
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback: nothing could be proved, so report a potential gap.  */
  return true;
}
1660
1661 /* Returns a LABEL_DECL node for LABEL_NAME. */
1662
1663 tree
1664 create_label_decl (tree label_name)
1665 {
1666 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1667
1668 DECL_CONTEXT (label_decl) = current_function_decl;
1669 DECL_MODE (label_decl) = VOIDmode;
1670 DECL_SOURCE_LOCATION (label_decl) = input_location;
1671
1672 return label_decl;
1673 }
1674 \f
1675 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1676 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1677 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1678 PARM_DECL nodes chained through the TREE_CHAIN field).
1679
1680 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1681 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1682
1683 tree
1684 create_subprog_decl (tree subprog_name, tree asm_name,
1685 tree subprog_type, tree param_decl_list, bool inline_flag,
1686 bool public_flag, bool extern_flag,
1687 struct attrib *attr_list, Node_Id gnat_node)
1688 {
1689 tree return_type = TREE_TYPE (subprog_type);
1690 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1691
1692 /* If this is a function nested inside an inlined external function, it
1693 means we aren't going to compile the outer function unless it is
1694 actually inlined, so do the same for us. */
1695 if (current_function_decl && DECL_INLINE (current_function_decl)
1696 && DECL_EXTERNAL (current_function_decl))
1697 extern_flag = true;
1698
1699 DECL_EXTERNAL (subprog_decl) = extern_flag;
1700 TREE_PUBLIC (subprog_decl) = public_flag;
1701 TREE_STATIC (subprog_decl) = 1;
1702 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1703 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1704 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1705 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1706 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1707 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1708 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1709
1710 if (inline_flag)
1711 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1712
1713 if (asm_name)
1714 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1715
1716 process_attributes (subprog_decl, attr_list);
1717
1718 /* Add this decl to the current binding level. */
1719 gnat_pushdecl (subprog_decl, gnat_node);
1720
1721 /* Output the assembler code and/or RTL for the declaration. */
1722 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1723
1724 return subprog_decl;
1725 }
1726 \f
1727 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1728 body. This routine needs to be invoked before processing the declarations
1729 appearing in the subprogram. */
1730
1731 void
1732 begin_subprog_body (tree subprog_decl)
1733 {
1734 tree param_decl;
1735
1736 current_function_decl = subprog_decl;
1737 announce_function (subprog_decl);
1738
1739 /* Enter a new binding level and show that all the parameters belong to
1740 this function. */
1741 gnat_pushlevel ();
1742 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1743 param_decl = TREE_CHAIN (param_decl))
1744 DECL_CONTEXT (param_decl) = subprog_decl;
1745
1746 make_decl_rtl (subprog_decl);
1747
1748 /* We handle pending sizes via the elaboration of types, so we don't need to
1749 save them. This causes them to be marked as part of the outer function
1750 and then discarded. */
1751 get_pending_sizes ();
1752 }
1753
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  BODY is the tree corresponding to
   the subprogram.  Must be paired with a prior begin_subprog_body call:
   it pops the binding level that call opened and restores the enclosing
   function (if any) as the current one.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Deal with inline.  If declared inline or we should default to inline,
     set the flag in the decl.  (flag_inline_trees == 2 means -finline-
     functions-style auto-inlining is in effect.)  */
  DECL_INLINE (fndecl)
    = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function as the current one; for a nested
     function DECL_CONTEXT is the parent FUNCTION_DECL, otherwise it is
     NULL and we return to file scope.  */
  current_function_decl = DECL_CONTEXT (fndecl);
  cfun = NULL;

  /* If we're only annotating types, don't actually compile this function. */
  if (type_annotate_only)
    return;

  /* We do different things for nested and non-nested functions.
     ??? This should be in cgraph.  */
  if (!DECL_CONTEXT (fndecl))
    {
      /* Top-level function: gimplify it (and its nested functions) and
	 hand it off to the callgraph for compilation.  */
      gnat_gimplify_function (fndecl);
      cgraph_finalize_function (fndecl, false);
    }
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
1803
1804 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
1805
1806 static void
1807 gnat_gimplify_function (tree fndecl)
1808 {
1809 struct cgraph_node *cgn;
1810
1811 dump_function (TDI_original, fndecl);
1812 gimplify_function_tree (fndecl);
1813 dump_function (TDI_generic, fndecl);
1814
1815 /* Convert all nested functions to GIMPLE now. We do things in this order
1816 so that items like VLA sizes are expanded properly in the context of the
1817 correct function. */
1818 cgn = cgraph_node (fndecl);
1819 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1820 gnat_gimplify_function (cgn->decl);
1821 }
1822 \f
1823 /* Return a definition for a builtin function named NAME and whose data type
1824 is TYPE. TYPE should be a function type with argument types.
1825 FUNCTION_CODE tells later passes how to compile calls to this function.
1826 See tree.h for its possible values.
1827
1828 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1829 the name to be called if we can't opencode the function. If
1830 ATTRS is nonzero, use that for the function attribute list. */
1831
1832 tree
1833 builtin_function (const char *name, tree type, int function_code,
1834 enum built_in_class class, const char *library_name,
1835 tree attrs)
1836 {
1837 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1838
1839 DECL_EXTERNAL (decl) = 1;
1840 TREE_PUBLIC (decl) = 1;
1841 if (library_name)
1842 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1843
1844 gnat_pushdecl (decl, Empty);
1845 DECL_BUILT_IN_CLASS (decl) = class;
1846 DECL_FUNCTION_CODE (decl) = function_code;
1847 if (attrs)
1848 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1849 return decl;
1850 }
1851
1852 /* Return an integer type with the number of bits of precision given by
1853 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1854 it is a signed type. */
1855
1856 tree
1857 gnat_type_for_size (unsigned precision, int unsignedp)
1858 {
1859 tree t;
1860 char type_name[20];
1861
1862 if (precision <= 2 * MAX_BITS_PER_WORD
1863 && signed_and_unsigned_types[precision][unsignedp])
1864 return signed_and_unsigned_types[precision][unsignedp];
1865
1866 if (unsignedp)
1867 t = make_unsigned_type (precision);
1868 else
1869 t = make_signed_type (precision);
1870
1871 if (precision <= 2 * MAX_BITS_PER_WORD)
1872 signed_and_unsigned_types[precision][unsignedp] = t;
1873
1874 if (!TYPE_NAME (t))
1875 {
1876 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1877 TYPE_NAME (t) = get_identifier (type_name);
1878 }
1879
1880 return t;
1881 }
1882
1883 /* Likewise for floating-point types. */
1884
1885 static tree
1886 float_type_for_precision (int precision, enum machine_mode mode)
1887 {
1888 tree t;
1889 char type_name[20];
1890
1891 if (float_types[(int) mode])
1892 return float_types[(int) mode];
1893
1894 float_types[(int) mode] = t = make_node (REAL_TYPE);
1895 TYPE_PRECISION (t) = precision;
1896 layout_type (t);
1897
1898 gcc_assert (TYPE_MODE (t) == mode);
1899 if (!TYPE_NAME (t))
1900 {
1901 sprintf (type_name, "FLOAT_%d", precision);
1902 TYPE_NAME (t) = get_identifier (type_name);
1903 }
1904
1905 return t;
1906 }
1907
1908 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1909 an unsigned type; otherwise a signed type is returned. */
1910
1911 tree
1912 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1913 {
1914 if (mode == BLKmode)
1915 return NULL_TREE;
1916 else if (mode == VOIDmode)
1917 return void_type_node;
1918 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1919 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
1920 else
1921 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1922 }
1923
1924 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1925
1926 tree
1927 gnat_unsigned_type (tree type_node)
1928 {
1929 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1930
1931 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1932 {
1933 type = copy_node (type);
1934 TREE_TYPE (type) = type_node;
1935 }
1936 else if (TREE_TYPE (type_node)
1937 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1938 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1939 {
1940 type = copy_node (type);
1941 TREE_TYPE (type) = TREE_TYPE (type_node);
1942 }
1943
1944 return type;
1945 }
1946
1947 /* Return the signed version of a TYPE_NODE, a scalar type. */
1948
1949 tree
1950 gnat_signed_type (tree type_node)
1951 {
1952 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
1953
1954 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1955 {
1956 type = copy_node (type);
1957 TREE_TYPE (type) = type_node;
1958 }
1959 else if (TREE_TYPE (type_node)
1960 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1961 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1962 {
1963 type = copy_node (type);
1964 TREE_TYPE (type) = TREE_TYPE (type_node);
1965 }
1966
1967 return type;
1968 }
1969
1970 /* Return a type the same as TYPE except unsigned or signed according to
1971 UNSIGNEDP. */
1972
1973 tree
1974 gnat_signed_or_unsigned_type (int unsignedp, tree type)
1975 {
1976 if (!INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
1977 return type;
1978 else
1979 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
1980 }
1981 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   Works by recursing over the expression tree, flipping MAX_P where an
   operation is anti-monotonic (NEGATE_EXPR, the RHS of MINUS_EXPR).  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_exceptional:
      /* Process each element of a TREE_LIST, preserving its PURPOSE.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp)
			  ? max_size (TREE_CHAIN (exp), max_p) : NULL_TREE);
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Substitute the extreme value of the discriminant's type for the
	 reference.  NOTE(review): operand 1 here is presumably the field
	 of a COMPONENT_REF for the discriminant — confirm against callers.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      /* A comparison can be at most 1 (true) and at least 0 (false).  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation reverses the extremum, so flip MAX_P for it.  */
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? !max_p : max_p)));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    /* Subtracting a larger RHS yields a smaller result, so the
	       RHS of MINUS_EXPR gets the opposite extremum.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? !max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && !TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build2 (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Either arm may be taken, so take the extremum of both.  */
	    return fold (build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				 max_size (TREE_OPERAND (exp, 1), max_p),
				 max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1))
	    /* Rebuild the call with its argument list processed.  */
	    return build3 (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			   max_size (TREE_OPERAND (exp, 1), max_p), NULL);
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
2080 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.

   The template has two FIELD_DECLs (min, max) per dimension; bounds are
   taken from TYPE_ACTUAL_BOUNDS when available, otherwise from each
   successive ARRAY_TYPE's domain.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip a padding or justified-modular wrapper to get at the real
     array type underneath.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* Per iteration: advance FIELD by two (a min/max field pair per
     dimension) and step either BOUND_LIST or the nested ARRAY_TYPE,
     whichever is driving the bounds.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      /* FIELD is the max field, its chain the min field; convert each
	 bound to the corresponding field type.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* Elements are consed in reverse; nreverse below restores order.  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2143 \f
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is an entity used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.

   The DTYPE and CLASS numeric codes below follow the OpenVMS calling
   standard's descriptor encoding.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Fortran arrays are column-major, so collect the index types in
     reverse order for them (except for non-contiguous NCA descriptors).  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	/* Map the VAX float by decimal digits: F (6), D/G (9), H (15).  */
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	/* Integer DTYPEs come in unsigned/signed pairs per bit size.  */
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      /* IEEE single (52) or double (53).  */
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* The POINTER field holds a 32-bit (SImode) address of the object,
     to be filled in against the PLACEHOLDER_EXPR at construction time.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("POINTER",
		build_pointer_type_for_mode (type, SImode, false), record_type,
		build1 (ADDR_EXPR,
			build_pointer_type_for_mode (type, SImode, false),
			build0 (PLACEHOLDER_EXPR, type))));

  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* String-with-bounds descriptors carry the two array bounds.  */
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0",
		    build_pointer_type_for_mode (inner_type, SImode, false),
		    record_type,
		    build1 (ADDR_EXPR,
			    build_pointer_type_for_mode (inner_type, SImode,
							 false),
			    tem)));

      /* Next come the addressing coefficients.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Length of dimension I times the product of the lengths of
	     the preceding dimensions (the running multiplier TEM).  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, false, true);
  create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
		    NULL, true, false, gnat_entity);

  return record_type;
}
2438
2439 /* Utility routine for above code to make a field. */
2440
2441 static tree
2442 make_descriptor_field (const char *name, tree type,
2443 tree rec_type, tree initial)
2444 {
2445 tree field
2446 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2447
2448 DECL_INITIAL (field) = initial;
2449 return field;
2450 }
2451 \f
2452 /* Build a type to be used to represent an aliased object whose nominal
2453 type is an unconstrained array. This consists of a RECORD_TYPE containing
2454 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2455 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2456 is used to represent an arbitrary unconstrained object. Use NAME
2457 as the name of the record. */
2458
2459 tree
2460 build_unc_object_type (tree template_type, tree object_type, tree name)
2461 {
2462 tree type = make_node (RECORD_TYPE);
2463 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2464 template_type, type, 0, 0, 0, 1);
2465 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2466 type, 0, 0, 0, 1);
2467
2468 TYPE_NAME (type) = name;
2469 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2470 finish_record_type (type,
2471 chainon (chainon (NULL_TREE, template_field),
2472 array_field),
2473 false, false);
2474
2475 return type;
2476 }
2477 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree ptr1, ref1;
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it.  After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed.  We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type).  We have to merge the
     qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      /* Transfer the pointer/reference chains to the new type, then
	 retarget every pointer variant and re-emit debug info for any
	 named pointer type.  */
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
	     ptr1 = TYPE_NEXT_VARIANT (ptr1))
	  {
	    TREE_TYPE (ptr1) = new_type;

	    if (TYPE_NAME (ptr1)
		&& TREE_CODE (TYPE_NAME (ptr1)) == TYPE_DECL
		&& TREE_CODE (new_type) != ENUMERAL_TYPE)
	      rest_of_decl_compilation (TYPE_NAME (ptr1),
					global_bindings_p (), 0);
	  }

      for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
	for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
	     ref1 = TYPE_NEXT_VARIANT (ref1))
	  {
	    TREE_TYPE (ref1) = new_type;

	    if (TYPE_NAME (ref1)
		&& TREE_CODE (TYPE_NAME (ref1)) == TYPE_DECL
		&& TREE_CODE (new_type) != ENUMERAL_TYPE)
	      rest_of_decl_compilation (TYPE_NAME (ref1),
					global_bindings_p (), 0);
	  }
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
    gcc_unreachable ();

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Graft the new fat pointer's fields (array pointer, bounds
	 pointer) onto the old fat-pointer record, remembering the
	 originals so debug info still sees them.  */
      SET_DECL_ORIGINAL_FIELD (TYPE_FIELDS (ptr),
			       TYPE_FIELDS (TYPE_POINTER_TO (new_type)));
      SET_DECL_ORIGINAL_FIELD (TREE_CHAIN (TYPE_FIELDS (ptr)),
			       TREE_CHAIN (TYPE_FIELDS
					   (TYPE_POINTER_TO (new_type))));

      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build3 (COMPONENT_REF, ptr_temp_type,
			build0 (PLACEHOLDER_EXPR, ptr),
			TREE_CHAIN (TYPE_FIELDS (ptr)), NULL_TREE);

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point every variant of the fat pointer back at the new
	 unconstrained array type.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* Record size = template size + array size, in bits and bytes.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2632 \f
/* Convert a pointer to a constrained array into a pointer to a fat
   pointer.  This involves making or finding a template.

   TYPE is the fat-pointer RECORD_TYPE (array pointer field followed by
   template pointer field); EXPR is the pointer being converted.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* Type of the bounds template, dug out of the fat pointer's second
     (template pointer) field.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used twice below (template and data), so protect it
	 against double evaluation, then turn it back into the record
	 it points to.  */
      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* First field of the record is the template, second the data.  */
      template = build_component_ref (expr, NULL_TREE, fields, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), false));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2698 \f
2699 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2700 is something that is a fat pointer, so convert to it first if it EXPR
2701 is not already a fat pointer. */
2702
2703 static tree
2704 convert_to_thin_pointer (tree type, tree expr)
2705 {
2706 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2707 expr
2708 = convert_to_fat_pointer
2709 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2710
2711 /* We get the pointer to the data and use a NOP_EXPR to make it the
2712 proper GCC type. */
2713 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
2714 false);
2715 expr = build1 (NOP_EXPR, type, expr);
2716
2717 return expr;
2718 }
2719 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))))
    /* Fall through the chain; this ends up as an unchecked conversion in
       the RECORD_TYPE/ARRAY_TYPE cases of the switch below.  */
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr)
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type,
			build_component_ref (expr, NULL_TREE,
					     TYPE_FIELDS (etype), false));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, false);

      else
	return
	  gnat_build_constructor (type,
				  tree_cons (TYPE_FIELDS (type),
					     convert (TREE_TYPE
						      (TYPE_FIELDS (type)),
						      expr),
					     NULL_TREE));
    }

  /* If the input is a biased type, adjust first: the actual value is the
     stored value plus the bias (TYPE_MIN_VALUE).  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build2 (PLUS_EXPR, TREE_TYPE (etype),
					fold (build1 (NOP_EXPR,
						      TREE_TYPE (etype),
						      expr)),
					TYPE_MIN_VALUE (etype))));

  /* If the input is a justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	  && (TREE_CODE (expr) == STRING_CST
	      || get_alias_set (etype) == get_alias_set (type)))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, false));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we're converting between two aggregate types that have the same main
     variant, just make a VIEW_CONVERT_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* In all other cases of related types, make a NOP_EXPR.
     NOTE(review): CODE and ECODE are the codes of *type* nodes, so the
     INTEGER_CST comparisons below can seemingly never be true and that
     subclause looks dead -- confirm intent (INTEGER_TYPE may have been
     meant).  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
	   || (code == INTEGER_CST && ecode == INTEGER_CST
	       && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case BOOLEAN_TYPE:
      return fold (build1 (NOP_EXPR, type, gnat_truthvalue_conversion (expr)));

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Subtract the bias (TYPE_MIN_VALUE) to get the stored value.  */
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build2 (MINUS_EXPR, TREE_TYPE (type),
					   convert (TREE_TYPE (type), expr),
					   TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      gcc_unreachable ();

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
3048 \f
3049 /* Remove all conversions that are done in EXP. This includes converting
3050 from a padded type or to a justified modular type. If TRUE_ADDRESS
3051 is true, always return the address of the containing object even if
3052 the address is not bit-aligned. */
3053
3054 tree
3055 remove_conversions (tree exp, bool true_address)
3056 {
3057 switch (TREE_CODE (exp))
3058 {
3059 case CONSTRUCTOR:
3060 if (true_address
3061 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3062 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3063 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), true);
3064 break;
3065
3066 case COMPONENT_REF:
3067 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3068 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3069 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3070 break;
3071
3072 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3073 case NOP_EXPR: case CONVERT_EXPR:
3074 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3075
3076 default:
3077 break;
3078 }
3079
3080 return exp;
3081 }
3082 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      /* For a reference through a fat pointer, fetch the P_ARRAY field
	 and dereference it to reach the actual array.  */
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, false));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): no break here -- any other expression of
	 unconstrained array type falls through into the RECORD_TYPE
	 handling below; confirm this is intentional.  */

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	/* The second field of a template-containing record is the array.  */
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}
3138 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && !(TREE_CODE (type) == INTEGER_TYPE
	      && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && !(TREE_CODE (etype) == INTEGER_TYPE
		 && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* For biased input, strip the biased-representation flag on a copy
	 of the type so the normal conversion sees a plain integer type.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Likewise convert to an unbiased copy of a biased target type,
	 then view the result as the biased type afterwards.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      expr = maybe_unconstrained_array (expr);

      /* There's no point in doing two unchecked conversions in a row.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	expr = TREE_OPERAND (expr, 0);

      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(TREE_CODE (type) == INTEGER_TYPE
	   && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && operand_equal_p (TYPE_RM_SIZE (type),
			       (TYPE_RM_SIZE (etype) != 0
				? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
			       0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Extend by shifting the value all the way left and then back
	 right; the right shift in BASE_TYPE does the sign or zero
	 extension according to TYPE's signedness.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3302
3303 #include "gt-ada-utils.h"
3304 #include "gtype-ada.h"