trans.c (add_decl_expr): Clear TREE_READONLY if clear DECL_INITIAL.
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41 #include "cgraph.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
45
46 #include "ada.h"
47 #include "types.h"
48 #include "atree.h"
49 #include "elists.h"
50 #include "namet.h"
51 #include "nlists.h"
52 #include "stringt.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
/* Fallbacks for targets that do not define these configuration macros.  */
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  Indexed by GNAT node id minus
   First_Node_Id; allocated by init_gnat_to_gnu.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct gnat_binding_level GTY((chain_next ("%h.chain")))
{
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.
   The GC may reclaim this chain (GTY deletable).  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* A chain of unused BLOCK nodes, linked through TREE_CHAIN, awaiting reuse.
   The GC may reclaim this chain (GTY deletable).  */
static GTY((deletable)) tree free_block_chain;

/* Per-function language-specific state; Ada keeps none, but the structure
   must exist for the GC machinery.  */
struct language_function GTY(())
{
  int unused;
};

/* Forward declarations for the static helpers defined below.  */
static void gnat_define_builtin (const char *, tree, int, const char *, bool);
static void gnat_install_builtins (void);
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static bool value_zerop (tree);
static void gnat_gimplify_function (tree);
static void gnat_finalize (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *, tree, tree, tree);
static bool value_factor_p (tree, HOST_WIDE_INT);
static bool potential_alignment_gap (tree, tree, tree);
135 \f
/* Initialize the association of GNAT nodes to GCC trees.  */

void
init_gnat_to_gnu (void)
{
  /* One slot per GNAT node.  ggc_alloc_cleared zeroes the table, so every
     entry starts out as NULL_TREE, i.e. "no association yet".  */
  associate_gnat_to_gnu
    = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
}
144
145 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
146 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
147 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
148
149 If GNU_DECL is zero, a previous association is to be reset. */
150
151 void
152 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
153 {
154 /* Check that GNAT_ENTITY is not already defined and that it is being set
155 to something which is a decl. Raise gigi 401 if not. Usually, this
156 means GNAT_ENTITY is defined twice, but occasionally is due to some
157 Gigi problem. */
158 if (gnu_decl
159 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
160 || (! no_check && ! DECL_P (gnu_decl))))
161 gigi_abort (401);
162
163 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
164 }
165
166 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
167 Return the ..._DECL node that was associated with it. If there is no tree
168 node associated with GNAT_ENTITY, abort.
169
170 In some cases, such as delayed elaboration or expressions that need to
171 be elaborated only once, GNAT_ENTITY is really not an entity. */
172
173 tree
174 get_gnu_tree (Entity_Id gnat_entity)
175 {
176 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
177 gigi_abort (402);
178
179 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
180 }
181
/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

int
present_gnu_tree (Entity_Id gnat_entity)
{
  return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
}
189
190 \f
191 /* Return non-zero if we are currently in the global binding level. */
192
193 int
194 global_bindings_p (void)
195 {
196 return (force_global != 0 || current_binding_level == 0
197 || current_binding_level->chain == 0 ? -1 : 0);
198 }
199
200 /* Enter a new binding level. */
201
202 void
203 gnat_pushlevel ()
204 {
205 struct gnat_binding_level *newlevel = NULL;
206
207 /* Reuse a struct for this binding level, if there is one. */
208 if (free_binding_level)
209 {
210 newlevel = free_binding_level;
211 free_binding_level = free_binding_level->chain;
212 }
213 else
214 newlevel
215 = (struct gnat_binding_level *)
216 ggc_alloc (sizeof (struct gnat_binding_level));
217
218 /* Use a free BLOCK, if any; otherwise, allocate one. */
219 if (free_block_chain)
220 {
221 newlevel->block = free_block_chain;
222 free_block_chain = TREE_CHAIN (free_block_chain);
223 TREE_CHAIN (newlevel->block) = NULL_TREE;
224 }
225 else
226 newlevel->block = make_node (BLOCK);
227
228 /* Point the BLOCK we just made to its parent. */
229 if (current_binding_level)
230 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
231
232 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
233 TREE_USED (newlevel->block) = 1;
234
235 /* Add this level to the front of the chain (stack) of levels that are
236 active. */
237 newlevel->chain = current_binding_level;
238 newlevel->jmpbuf_decl = NULL_TREE;
239 current_binding_level = newlevel;
240 }
241
/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK, making it the function's outermost
   scope (DECL_INITIAL).  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
}
251
/* Set the jmpbuf_decl for the current binding level to DECL.  This is the
   setjmp buffer that must be updated for any variable-sized definition
   within this contour.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}
259
260 /* Get the jmpbuf_decl, if any, for the current binding level. */
261
262 tree
263 get_block_jmpbuf_decl ()
264 {
265 return current_binding_level->jmpbuf_decl;
266 }
267
268 /* Exit a binding level. Set any BLOCK into the current code group. */
269
270 void
271 gnat_poplevel ()
272 {
273 struct gnat_binding_level *level = current_binding_level;
274 tree block = level->block;
275
276 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
277 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
278
279 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
280 are no variables free the block and merge its subblocks into those of its
281 parent block. Otherwise, add it to the list of its parent. */
282 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
283 ;
284 else if (BLOCK_VARS (block) == NULL_TREE)
285 {
286 BLOCK_SUBBLOCKS (level->chain->block)
287 = chainon (BLOCK_SUBBLOCKS (block),
288 BLOCK_SUBBLOCKS (level->chain->block));
289 TREE_CHAIN (block) = free_block_chain;
290 free_block_chain = block;
291 }
292 else
293 {
294 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
295 BLOCK_SUBBLOCKS (level->chain->block) = block;
296 TREE_USED (block) = 1;
297 set_block_for_group (block);
298 }
299
300 /* Free this binding structure. */
301 current_binding_level = level->chain;
302 level->chain = free_binding_level;
303 free_binding_level = level;
304 }
305
/* Insert BLOCK at the end of the list of subblocks of the
   current binding level.  This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

void
insert_block (tree block)
{
  TREE_USED (block) = 1;
  /* Push onto the front of the subblock chain of the current level.  */
  TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
  BLOCK_SUBBLOCKS (current_binding_level->block) = block;
}
317 \f
/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of its function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    DECL_CONTEXT (decl) = 0;
  else
    DECL_CONTEXT (decl) = current_function_decl;

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  We don't do this for global
     variables.  Also, don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
     the list.  They will cause trouble with the debugger and aren't needed
     anyway.  */
  if (!global_bindings_p ()
      && (TREE_CODE (decl) != TYPE_DECL
	  || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE))
    {
      TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
      BLOCK_VARS (current_binding_level->block) = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
	  || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
	  || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
	      && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
	      && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  /* CONST_DECLs need no further processing by the back end.  */
  if (TREE_CODE (decl) != CONST_DECL)
    rest_of_decl_compilation (decl, NULL, global_bindings_p (), 0);
}
370 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  Creates the global binding level, the common
   tree nodes, and the builtin functions the middle-end needs.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  gnat_pushlevel ();

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
  set_sizetype (size_type_node);
  build_common_tree_nodes_2 (0);

  /* Give names and make TYPE_DECLs for common types.  */
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype),
		 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			     integer_type_node),
		 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			     char_type_node),
		 Empty);
  gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("long integer"),
			     long_integer_type_node),
		 Empty);

  ptr_void_type_node = build_pointer_type (void_type_node);

  gnat_install_builtins ();
}
413
/* Define a builtin function named NAME with type TYPE and builtin code
   FUNCTION_CODE; LIBRARY_NAME, if non-null, becomes its assembler name.
   CONST_P marks the function as having no side effects (TREE_READONLY).
   This is temporary and is just being done to initialize
   implicit_built_in_decls for the middle-end.  We'll want to do full
   builtin processing soon.  */

static void
gnat_define_builtin (const char *name, tree type,
		     int function_code, const char *library_name, bool const_p)
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  if (library_name)
    SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
  make_decl_rtl (decl, NULL);
  gnat_pushdecl (decl, Empty);
  DECL_BUILT_IN_CLASS (decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (decl) = function_code;
  TREE_READONLY (decl) = const_p;

  /* Register with the middle-end so it can emit calls to this builtin.  */
  implicit_built_in_decls[function_code] = decl;
}
436
437 /* Install the builtin functions the middle-end needs. */
438
439 static void
440 gnat_install_builtins ()
441 {
442 tree ftype;
443 tree tmp;
444
445 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
446 tmp = tree_cons (NULL_TREE, long_integer_type_node, tmp);
447 ftype = build_function_type (long_integer_type_node, tmp);
448 gnat_define_builtin ("__builtin_expect", ftype, BUILT_IN_EXPECT,
449 "__builtin_expect", true);
450
451 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
452 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
453 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
454 ftype = build_function_type (ptr_void_type_node, tmp);
455 gnat_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
456 "memcpy", false);
457
458 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
459 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
460 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
461 ftype = build_function_type (integer_type_node, tmp);
462 gnat_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
463 "memcmp", false);
464
465 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
466 tmp = tree_cons (NULL_TREE, integer_type_node, tmp);
467 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
468 ftype = build_function_type (integer_type_node, tmp);
469 gnat_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
470 "memset", false);
471
472 tmp = tree_cons (NULL_TREE, integer_type_node, void_list_node);
473 ftype = build_function_type (integer_type_node, tmp);
474 gnat_define_builtin ("__builtin_clz", ftype, BUILT_IN_CLZ, "clz", true);
475
476 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
477 ftype = build_function_type (integer_type_node, tmp);
478 gnat_define_builtin ("__builtin_clzl", ftype, BUILT_IN_CLZL, "clzl", true);
479
480 tmp = tree_cons (NULL_TREE, long_long_integer_type_node, void_list_node);
481 ftype = build_function_type (integer_type_node, tmp);
482 gnat_define_builtin ("__builtin_clzll", ftype, BUILT_IN_CLZLL, "clzll",
483 true);
484
485 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
486 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
487 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
488 ftype = build_function_type (void_type_node, tmp);
489 gnat_define_builtin ("__builtin_init_trampoline", ftype,
490 BUILT_IN_INIT_TRAMPOLINE, "init_trampoline", false);
491
492 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
493 ftype = build_function_type (ptr_void_type_node, tmp);
494 gnat_define_builtin ("__builtin_adjust_trampoline", ftype,
495 BUILT_IN_ADJUST_TRAMPOLINE, "adjust_trampoline", true);
496
497 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
498 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
499 ftype = build_function_type (ptr_void_type_node, tmp);
500 gnat_define_builtin ("__builtin_stack_alloc", ftype, BUILT_IN_STACK_ALLOC,
501 "stack_alloc", false);
502
503 /* The stack_save and stack_restore builtins aren't used directly. They
504 are inserted during gimplification to implement stack_alloc calls. */
505 ftype = build_function_type (ptr_void_type_node, void_list_node);
506 gnat_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
507 "stack_save", false);
508 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
509 ftype = build_function_type (void_type_node, tmp);
510 gnat_define_builtin ("__builtin_stack_restore", ftype,
511 BUILT_IN_STACK_RESTORE, "stack_restore", false);
512 }
513
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.
   LONG_LONG_FLOAT_TYPE and EXCEPTION_TYPE come from the front end; they
   seed longest_float_type_node and except_type_node.  Also declares the
   GNAT runtime subprograms (allocation, exception handling, setjmp).  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      create_type_decl (get_identifier ("longest float type"),
			longest_float_type_node, NULL, 0, 1, Empty);
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
		    NULL, 0, 1, Empty);

  void_type_decl_node = create_type_decl (get_identifier ("void"),
					  void_type_node, NULL, 0, 1, Empty);

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, 0, 1, 1, 0, Empty);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0, Empty);

  /* Make the types and functions used for exception processing.
     The jmpbuf is an array of 6 pointer-sized words (index 0..5).  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
		    0, 1, Empty);
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
     NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
     NULL_TREE, 0, 1, 1, 0, Empty);

  set_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
     NULL_TREE,
     build_function_type (void_type_node,
			  tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
     NULL_TREE, 0, 1, 1, 0, Empty);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_gnat_exception"),
     NULL_TREE,
     build_function_type (build_pointer_type (except_type_node), NULL_TREE),
     NULL_TREE, 0, 1, 1, 0, Empty);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, 0, 1, 1, 0, Empty);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0, Empty);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0, Empty);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, 0, 1, 1, 0, Empty);

      /* Every raise reason shares the same last-chance handler.  */
      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	/* "__gnat_rcheck_" (14 chars) + 2 digits + NUL = 17.  */
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, 0, 1, 1, 0, Empty);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE,  jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0, Empty);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
     address.  */
  update_setjmp_buf_decl
    = create_subprog_decl
      (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,  jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0, Empty);

  DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;

  main_identifier_node = get_identifier ("main");
}
712 \f
713 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL nodes
714 (FIELDLIST), finish constructing the record or union type. If HAS_REP is
715 nonzero, this record has a rep clause; don't call layout_type but merely set
716 the size and alignment ourselves. If DEFER_DEBUG is nonzero, do not call
717 the debugging routines on this type; it will be done later. */
718
719 void
720 finish_record_type (tree record_type, tree fieldlist, int has_rep,
721 int defer_debug)
722 {
723 enum tree_code code = TREE_CODE (record_type);
724 tree ada_size = bitsize_zero_node;
725 tree size = bitsize_zero_node;
726 tree size_unit = size_zero_node;
727 int var_size = 0;
728 tree field;
729
730 TYPE_FIELDS (record_type) = fieldlist;
731 TYPE_STUB_DECL (record_type)
732 = build_decl (TYPE_DECL, NULL_TREE, record_type);
733
734 /* We don't need both the typedef name and the record name output in
735 the debugging information, since they are the same. */
736 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
737
738 /* Globally initialize the record first. If this is a rep'ed record,
739 that just means some initializations; otherwise, layout the record. */
740
741 if (has_rep)
742 {
743 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
744 TYPE_MODE (record_type) = BLKmode;
745 if (TYPE_SIZE (record_type) == 0)
746 {
747 TYPE_SIZE (record_type) = bitsize_zero_node;
748 TYPE_SIZE_UNIT (record_type) = size_zero_node;
749 }
750 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
751 out just like a UNION_TYPE, since the size will be fixed. */
752 else if (code == QUAL_UNION_TYPE)
753 code = UNION_TYPE;
754 }
755 else
756 {
757 /* Ensure there isn't a size already set. There can be in an error
758 case where there is a rep clause but all fields have errors and
759 no longer have a position. */
760 TYPE_SIZE (record_type) = 0;
761 layout_type (record_type);
762 }
763
764 /* At this point, the position and size of each field is known. It was
765 either set before entry by a rep clause, or by laying out the type above.
766
767 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
768 to compute the Ada size; the GCC size and alignment (for rep'ed records
769 that are not padding types); and the mode (for rep'ed records). We also
770 clear the DECL_BIT_FIELD indication for the cases we know have not been
771 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
772
773 if (code == QUAL_UNION_TYPE)
774 fieldlist = nreverse (fieldlist);
775
776 for (field = fieldlist; field; field = TREE_CHAIN (field))
777 {
778 tree pos = bit_position (field);
779
780 tree type = TREE_TYPE (field);
781 tree this_size = DECL_SIZE (field);
782 tree this_size_unit = DECL_SIZE_UNIT (field);
783 tree this_ada_size = DECL_SIZE (field);
784
785 /* We need to make an XVE/XVU record if any field has variable size,
786 whether or not the record does. For example, if we have an union,
787 it may be that all fields, rounded up to the alignment, have the
788 same size, in which case we'll use that size. But the debug
789 output routines (except Dwarf2) won't be able to output the fields,
790 so we need to make the special record. */
791 if (TREE_CODE (this_size) != INTEGER_CST)
792 var_size = 1;
793
794 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
795 || TREE_CODE (type) == QUAL_UNION_TYPE)
796 && ! TYPE_IS_FAT_POINTER_P (type)
797 && ! TYPE_CONTAINS_TEMPLATE_P (type)
798 && TYPE_ADA_SIZE (type) != 0)
799 this_ada_size = TYPE_ADA_SIZE (type);
800
801 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
802 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
803 && value_factor_p (pos, BITS_PER_UNIT)
804 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
805 DECL_BIT_FIELD (field) = 0;
806
807 /* If we still have DECL_BIT_FIELD set at this point, we know the field
808 is technically not addressable. Except that it can actually be
809 addressed if the field is BLKmode and happens to be properly
810 aligned. */
811 DECL_NONADDRESSABLE_P (field)
812 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
813
814 if (has_rep && ! DECL_BIT_FIELD (field))
815 TYPE_ALIGN (record_type)
816 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
817
818 switch (code)
819 {
820 case UNION_TYPE:
821 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
822 size = size_binop (MAX_EXPR, size, this_size);
823 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
824 break;
825
826 case QUAL_UNION_TYPE:
827 ada_size
828 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
829 this_ada_size, ada_size));
830 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
831 this_size, size));
832 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
833 this_size_unit, size_unit));
834 break;
835
836 case RECORD_TYPE:
837 /* Since we know here that all fields are sorted in order of
838 increasing bit position, the size of the record is one
839 higher than the ending bit of the last field processed
840 unless we have a rep clause, since in that case we might
841 have a field outside a QUAL_UNION_TYPE that has a higher ending
842 position. So use a MAX in that case. Also, if this field is a
843 QUAL_UNION_TYPE, we need to take into account the previous size in
844 the case of empty variants. */
845 ada_size
846 = merge_sizes (ada_size, pos, this_ada_size,
847 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
848 size = merge_sizes (size, pos, this_size,
849 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
850 size_unit
851 = merge_sizes (size_unit, byte_position (field), this_size_unit,
852 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
853 break;
854
855 default:
856 abort ();
857 }
858 }
859
860 if (code == QUAL_UNION_TYPE)
861 nreverse (fieldlist);
862
863 /* If this is a padding record, we never want to make the size smaller than
864 what was specified in it, if any. */
865 if (TREE_CODE (record_type) == RECORD_TYPE
866 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
867 {
868 size = TYPE_SIZE (record_type);
869 size_unit = TYPE_SIZE_UNIT (record_type);
870 }
871
872 /* Now set any of the values we've just computed that apply. */
873 if (! TYPE_IS_FAT_POINTER_P (record_type)
874 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
875 SET_TYPE_ADA_SIZE (record_type, ada_size);
876
877 if (has_rep)
878 {
879 if (! (TREE_CODE (record_type) == RECORD_TYPE
880 && TYPE_IS_PADDING_P (record_type)
881 && CONTAINS_PLACEHOLDER_P (size)))
882 {
883 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
884 TYPE_SIZE_UNIT (record_type)
885 = round_up (size_unit,
886 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
887 }
888
889 compute_record_mode (record_type);
890 }
891
892 if (! defer_debug)
893 {
894 /* If this record is of variable size, rename it so that the
895 debugger knows it is and make a new, parallel, record
896 that tells the debugger how the record is laid out. See
897 exp_dbug.ads. But don't do this for records that are padding
898 since they confuse GDB. */
899 if (var_size
900 && ! (TREE_CODE (record_type) == RECORD_TYPE
901 && TYPE_IS_PADDING_P (record_type)))
902 {
903 tree new_record_type
904 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
905 ? UNION_TYPE : TREE_CODE (record_type));
906 tree orig_name = TYPE_NAME (record_type);
907 tree orig_id
908 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
909 : orig_name);
910 tree new_id
911 = concat_id_with_name (orig_id,
912 TREE_CODE (record_type) == QUAL_UNION_TYPE
913 ? "XVU" : "XVE");
914 tree last_pos = bitsize_zero_node;
915 tree old_field;
916 tree prev_old_field = 0;
917
918 TYPE_NAME (new_record_type) = new_id;
919 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
920 TYPE_STUB_DECL (new_record_type)
921 = build_decl (TYPE_DECL, NULL_TREE, new_record_type);
922 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
923 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
924 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
925 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
926
927 /* Now scan all the fields, replacing each field with a new
928 field corresponding to the new encoding. */
929 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
930 old_field = TREE_CHAIN (old_field))
931 {
932 tree field_type = TREE_TYPE (old_field);
933 tree field_name = DECL_NAME (old_field);
934 tree new_field;
935 tree curpos = bit_position (old_field);
936 int var = 0;
937 unsigned int align = 0;
938 tree pos;
939
940 /* See how the position was modified from the last position.
941
942 There are two basic cases we support: a value was added
943 to the last position or the last position was rounded to
944 a boundary and they something was added. Check for the
945 first case first. If not, see if there is any evidence
946 of rounding. If so, round the last position and try
947 again.
948
949 If this is a union, the position can be taken as zero. */
950
951 if (TREE_CODE (new_record_type) == UNION_TYPE)
952 pos = bitsize_zero_node, align = 0;
953 else
954 pos = compute_related_constant (curpos, last_pos);
955
956 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
957 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
958 {
959 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
960 pos = compute_related_constant (curpos,
961 round_up (last_pos, align));
962 }
963 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
964 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
965 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
966 && host_integerp (TREE_OPERAND
967 (TREE_OPERAND (curpos, 0), 1),
968 1))
969 {
970 align
971 = tree_low_cst
972 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
973 pos = compute_related_constant (curpos,
974 round_up (last_pos, align));
975 }
976 else if (potential_alignment_gap (prev_old_field, old_field,
977 pos))
978 {
979 align = TYPE_ALIGN (field_type);
980 pos = compute_related_constant (curpos,
981 round_up (last_pos, align));
982 }
983
984 /* If we can't compute a position, set it to zero.
985
986 ??? We really should abort here, but it's too much work
987 to get this correct for all cases. */
988
989 if (pos == 0)
990 pos = bitsize_zero_node;
991
992 /* See if this type is variable-size and make a new type
993 and indicate the indirection if so. */
994 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
995 {
996 field_type = build_pointer_type (field_type);
997 var = 1;
998 }
999
1000 /* Make a new field name, if necessary. */
1001 if (var || align != 0)
1002 {
1003 char suffix[6];
1004
1005 if (align != 0)
1006 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1007 align / BITS_PER_UNIT);
1008 else
1009 strcpy (suffix, "XVL");
1010
1011 field_name = concat_id_with_name (field_name, suffix);
1012 }
1013
1014 new_field = create_field_decl (field_name, field_type,
1015 new_record_type, 0,
1016 DECL_SIZE (old_field), pos, 0);
1017 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1018 TYPE_FIELDS (new_record_type) = new_field;
1019
1020 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1021 zero. The only time it's not the last field of the record
1022 is when there are other components at fixed positions after
1023 it (meaning there was a rep clause for every field) and we
1024 want to be able to encode them. */
1025 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1026 (TREE_CODE (TREE_TYPE (old_field))
1027 == QUAL_UNION_TYPE)
1028 ? bitsize_zero_node
1029 : DECL_SIZE (old_field));
1030 prev_old_field = old_field;
1031 }
1032
1033 TYPE_FIELDS (new_record_type)
1034 = nreverse (TYPE_FIELDS (new_record_type));
1035
1036 rest_of_type_compilation (new_record_type, global_bindings_p ());
1037 }
1038
1039 rest_of_type_compilation (record_type, global_bindings_p ());
1040 }
1041 }
1042
/* Utility function of finish_record_type to merge LAST_SIZE, the previous
   size of a record, with FIRST_BIT and SIZE that describe a field.  SPECIAL
   is nonzero if this represents a QUAL_UNION_TYPE, in which case we must
   look for COND_EXPRs and replace a value of zero with the old size.  If
   HAS_REP is nonzero, we must take the MAX of the end position of this
   field with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
	     bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  /* Normal case: the field ends at FIRST_BIT + SIZE.  With a rep clause
     fields may be laid out out of order, so take the MAX with the size
     computed so far instead of just using the end position.  */
  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  /* QUAL_UNION_TYPE case: SIZE is a COND_EXPR keyed on the variant's
     qualifier.  Recurse on each arm; an arm of zero denotes an empty
     variant, for which we keep the previous size rather than letting it
     shrink the record.  */
  else
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1085
1086 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1087 related by the addition of a constant. Return that constant if so. */
1088
1089 static tree
1090 compute_related_constant (tree op0, tree op1)
1091 {
1092 tree op0_var, op1_var;
1093 tree op0_con = split_plus (op0, &op0_var);
1094 tree op1_con = split_plus (op1, &op1_var);
1095 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1096
1097 if (operand_equal_p (op0_var, op1_var, 0))
1098 return result;
1099 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1100 return result;
1101 else
1102 return 0;
1103 }
1104
/* Utility function of compute_related_constant to split a tree OP which may
   be a sum, into a constant part, which is returned, and a variable part,
   which is stored in *PVAR.  *PVAR may be bitsize_zero_node.  All
   operations must be of bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  /* Default decomposition: the whole tree is the variable part and the
     constant part is zero; overridden below where we can do better.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Split both operands and recombine their constant and variable
	 parts with the same PLUS/MINUS operation.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If neither operand yielded a constant part, keep the default
	 *pvar set above and report a zero constant, avoiding the
	 construction of a new tree.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
1142 \f
1143 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1144 subprogram. If it is void_type_node, then we are dealing with a procedure,
1145 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1146 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1147 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1148 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1149 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1150 RETURNS_WITH_DSP is nonzero if the function is to return with a
1151 depressed stack pointer. */
1152 tree
1153 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1154 int returns_unconstrained, int returns_by_ref,
1155 int returns_with_dsp)
1156 {
1157 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1158 the subprogram formal parameters. This list is generated by traversing the
1159 input list of PARM_DECL nodes. */
1160 tree param_type_list = NULL;
1161 tree param_decl;
1162 tree type;
1163
1164 for (param_decl = param_decl_list; param_decl;
1165 param_decl = TREE_CHAIN (param_decl))
1166 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1167 param_type_list);
1168
1169 /* The list of the function parameter types has to be terminated by the void
1170 type to signal to the back-end that we are not dealing with a variable
1171 parameter subprogram, but that the subprogram has a fixed number of
1172 parameters. */
1173 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1174
1175 /* The list of argument types has been created in reverse
1176 so nreverse it. */
1177 param_type_list = nreverse (param_type_list);
1178
1179 type = build_function_type (return_type, param_type_list);
1180
1181 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1182 or the new type should, make a copy of TYPE. Likewise for
1183 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1184 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1185 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1186 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1187 type = copy_type (type);
1188
1189 SET_TYPE_CI_CO_LIST (type, cico_list);
1190 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1191 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1192 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1193 return type;
1194 }
1195 \f
1196 /* Return a copy of TYPE but safe to modify in any way. */
1197
1198 tree
1199 copy_type (tree type)
1200 {
1201 tree new = copy_node (type);
1202
1203 /* copy_node clears this field instead of copying it, because it is
1204 aliased with TREE_CHAIN. */
1205 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1206
1207 TYPE_POINTER_TO (new) = 0;
1208 TYPE_REFERENCE_TO (new) = 0;
1209 TYPE_MAIN_VARIANT (new) = new;
1210 TYPE_NEXT_VARIANT (new) = 0;
1211
1212 return new;
1213 }
1214 \f
1215 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1216 TYPE_INDEX_TYPE is INDEX. */
1217
1218 tree
1219 create_index_type (tree min, tree max, tree index)
1220 {
1221 /* First build a type for the desired range. */
1222 tree type = build_index_2_type (min, max);
1223
1224 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1225 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1226 is set, but not to INDEX, make a copy of this type with the requested
1227 index type. Note that we have no way of sharing these types, but that's
1228 only a small hole. */
1229 if (TYPE_INDEX_TYPE (type) == index)
1230 return type;
1231 else if (TYPE_INDEX_TYPE (type) != 0)
1232 type = copy_type (type);
1233
1234 SET_TYPE_INDEX_TYPE (type, index);
1235 create_type_decl (NULL_TREE, type, NULL, 1, 0, Empty);
1236 return type;
1237 }
1238 \f
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.
   ARTIFICIAL_P is nonzero if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is nonzero if we need to write debugging
   information about this type.  GNAT_NODE is used for the position of
   the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
		  int artificial_p, int debug_info_p, Node_Id gnat_node)
{
  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;

  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
     a dummy type, which will be completed later, or a type for which
     debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
      || ! debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
	   && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
		 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);

  /* Don't enter a dummy (not-yet-completed) type in the current binding
     level.  */
  if (!TYPE_IS_DUMMY_P (type))
    gnat_pushdecl (type_decl, gnat_node);

  return type_decl;
}
1275
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
                 int const_flag, int public_flag, int extern_flag,
                 int static_flag, struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is usable as a constant initializer: its type
     must agree with TYPE's main variant and, for a global or static
     object, it must be valid for static initialization; otherwise it
     being TREE_CONSTANT suffices.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small".  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.
     If we are defining a global here, leave a constant initialization and
     save any variable elaborations for the elaboration routine.  If we are
     just annotating types, throw away the initialization if it isn't a
     constant.  */
  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top
     level we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  /* TREE_SIDE_EFFECTS was set from TYPE_VOLATILE above; such objects are
     also made addressable.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  return var_decl;
}
1356 \f
/* Returns a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is
   its type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
                   int packed, tree size, tree pos, int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && ! addressable
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  /* When a position is specified, consider the field packed only if it
     actually became a bitfield; otherwise propagate the packing request.  */
  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = ! addressable;

  return field_decl;
}
1489
1490 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1491 effects, has the value of zero. */
1492
1493 static bool
1494 value_zerop (tree exp)
1495 {
1496 if (TREE_CODE (exp) == COMPOUND_EXPR)
1497 return value_zerop (TREE_OPERAND (exp, 1));
1498
1499 return integer_zerop (exp);
1500 }
1501 \f
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

tree
create_param_decl (tree param_name, tree param_type, int readonly)
{
  /* Note that the decl keeps the type as passed in; only the argument
     types set at the end reflect any promotion done below.  */
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor targetm.calls.promote_prototypes(), as not doing so can
     lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (param_type)
      && (TREE_CODE (param_type) == INTEGER_TYPE
	  || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  param_type
	    = copy_type (build_range_type (integer_type_node,
					   TYPE_MIN_VALUE (param_type),
					   TYPE_MAX_VALUE (param_type)));

	  TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
	}
      else
	param_type = integer_type_node;
    }

  /* Both argument-type slots receive the (possibly promoted) type.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
1540 \f
1541 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1542
1543 void
1544 process_attributes (tree decl, struct attrib *attr_list)
1545 {
1546 for (; attr_list; attr_list = attr_list->next)
1547 switch (attr_list->type)
1548 {
1549 case ATTR_MACHINE_ATTRIBUTE:
1550 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1551 NULL_TREE),
1552 ATTR_FLAG_TYPE_IN_PLACE);
1553 break;
1554
1555 case ATTR_LINK_ALIAS:
1556 TREE_STATIC (decl) = 1;
1557 assemble_alias (decl, attr_list->name);
1558 break;
1559
1560 case ATTR_WEAK_EXTERNAL:
1561 if (SUPPORTS_WEAK)
1562 declare_weak (decl);
1563 else
1564 post_error ("?weak declarations not supported on this target",
1565 attr_list->error_point);
1566 break;
1567
1568 case ATTR_LINK_SECTION:
1569 if (targetm.have_named_sections)
1570 {
1571 DECL_SECTION_NAME (decl)
1572 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1573 IDENTIFIER_POINTER (attr_list->name));
1574 }
1575 else
1576 post_error ("?section attributes are not supported for this target",
1577 attr_list->error_point);
1578 break;
1579 }
1580 }
1581 \f
1582 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1583 a power of 2. */
1584
1585 static bool
1586 value_factor_p (tree value, HOST_WIDE_INT factor)
1587 {
1588 if (host_integerp (value, 1))
1589 return tree_low_cst (value, 1) % factor == 0;
1590
1591 if (TREE_CODE (value) == MULT_EXPR)
1592 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1593 || value_factor_p (TREE_OPERAND (value, 1), factor));
1594
1595 return 0;
1596 }
1597
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return 0;

  /* If the previous field is a union type, then return false: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return 0;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return (!integer_zerop (offset));

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return 0;

  /* Fallback: report that there may be a potential gap.  */
  return 1;
}
1643
1644 /* Returns a LABEL_DECL node for LABEL_NAME. */
1645
1646 tree
1647 create_label_decl (tree label_name)
1648 {
1649 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1650
1651 DECL_CONTEXT (label_decl) = current_function_decl;
1652 DECL_MODE (label_decl) = VOIDmode;
1653 DECL_SOURCE_LOCATION (label_decl) = input_location;
1654
1655 return label_decl;
1656 }
1657 \f
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
                     tree subprog_type, tree param_decl_list, int inline_flag,
                     int public_flag, int extern_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = 1;

  DECL_EXTERNAL (subprog_decl)  = extern_flag;
  TREE_PUBLIC (subprog_decl)    = public_flag;
  TREE_STATIC (subprog_decl)	= 1;
  /* Propagate the readonly/volatile qualifiers of the function type.  */
  TREE_READONLY (subprog_decl)  = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  /* The RESULT_DECL holds the (unnamed) return value of the subprogram.  */
  DECL_RESULT (subprog_decl)    = build_decl (RESULT_DECL, 0, return_type);

  if (inline_flag)
    DECL_DECLARED_INLINE_P (subprog_decl) = 1;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
}
1707 \f
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  /* Make SUBPROG_DECL the function being compiled before announcing it.  */
  current_function_decl = subprog_decl;
  announce_function (subprog_decl);

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();
  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = TREE_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl, NULL);

  /* We handle pending sizes via the elaboration of types, so we don't need to
     save them.  This causes them to be marked as part of the outer function
     and then discarded.  */
  get_pending_sizes ();
}
1734
1735 /* Finish the definition of the current subprogram and compile it all the way
1736 to assembler language output. BODY is the tree corresponding to
1737 the subprogram. */
1738
void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Deal with inline.  If declared inline or we should default to inline,
     set the flag in the decl.  */
  DECL_INLINE (fndecl)
    = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* Attach the GENERIC body to the decl.  */
  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function (or NULL at top level) as current.  */
  current_function_decl = DECL_CONTEXT (fndecl);
  cfun = NULL;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* We do different things for nested and non-nested functions.
     ??? This should be in cgraph.  */
  if (!DECL_CONTEXT (fndecl))
    {
      /* Top-level function: gimplify it (and its nested functions), lower
	 the nested functions away, then hand everything to cgraph.  */
      gnat_gimplify_function (fndecl);
      lower_nested_functions (fndecl);
      gnat_finalize (fndecl);
    }
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
1785
1786 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
1787
1788 static void
1789 gnat_gimplify_function (tree fndecl)
1790 {
1791 struct cgraph_node *cgn;
1792
1793 dump_function (TDI_original, fndecl);
1794 gimplify_function_tree (fndecl);
1795 dump_function (TDI_generic, fndecl);
1796
1797 /* Convert all nested functions to GIMPLE now. We do things in this order
1798 so that items like VLA sizes are expanded properly in the context of the
1799 correct function. */
1800 cgn = cgraph_node (fndecl);
1801 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1802 gnat_gimplify_function (cgn->decl);
1803 }
1804
1805 /* Give FNDECL and all its nested functions to cgraph for compilation. */
1806
1807 static void
1808 gnat_finalize (tree fndecl)
1809 {
1810 struct cgraph_node *cgn;
1811
1812 /* Finalize all nested functions now. */
1813 cgn = cgraph_node (fndecl);
1814 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
1815 gnat_finalize (cgn->decl);
1816
1817 cgraph_finalize_function (fndecl, false);
1818 }
1819 \f
1820 /* Return a definition for a builtin function named NAME and whose data type
1821 is TYPE. TYPE should be a function type with argument types.
1822 FUNCTION_CODE tells later passes how to compile calls to this function.
1823 See tree.h for its possible values.
1824
1825 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1826 the name to be called if we can't opencode the function. If
1827 ATTRS is nonzero, use that for the function attribute list. */
1828
tree
builtin_function (const char *name, tree type, int function_code,
                  enum built_in_class class, const char *library_name,
		  tree attrs)
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  /* Builtins are externally defined and globally visible.  */
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  /* Use LIBRARY_NAME as the fallback symbol when the builtin is not
     open-coded.  */
  if (library_name)
    SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));

  /* Make the decl visible at the current binding level (no associated
     GNAT node, hence Empty).  */
  gnat_pushdecl (decl, Empty);
  DECL_BUILT_IN_CLASS (decl) = class;
  DECL_FUNCTION_CODE (decl) = function_code;
  if (attrs)
    decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
  return decl;
}
1848
1849 /* Return an integer type with the number of bits of precision given by
1850 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1851 it is a signed type. */
1852
1853 tree
1854 gnat_type_for_size (unsigned precision, int unsignedp)
1855 {
1856 tree t;
1857 char type_name[20];
1858
1859 if (precision <= 2 * MAX_BITS_PER_WORD
1860 && signed_and_unsigned_types[precision][unsignedp] != 0)
1861 return signed_and_unsigned_types[precision][unsignedp];
1862
1863 if (unsignedp)
1864 t = make_unsigned_type (precision);
1865 else
1866 t = make_signed_type (precision);
1867
1868 if (precision <= 2 * MAX_BITS_PER_WORD)
1869 signed_and_unsigned_types[precision][unsignedp] = t;
1870
1871 if (TYPE_NAME (t) == 0)
1872 {
1873 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1874 TYPE_NAME (t) = get_identifier (type_name);
1875 }
1876
1877 return t;
1878 }
1879
1880 /* Likewise for floating-point types. */
1881
1882 static tree
1883 float_type_for_precision (int precision, enum machine_mode mode)
1884 {
1885 tree t;
1886 char type_name[20];
1887
1888 if (float_types[(int) mode] != 0)
1889 return float_types[(int) mode];
1890
1891 float_types[(int) mode] = t = make_node (REAL_TYPE);
1892 TYPE_PRECISION (t) = precision;
1893 layout_type (t);
1894
1895 if (TYPE_MODE (t) != mode)
1896 gigi_abort (414);
1897
1898 if (TYPE_NAME (t) == 0)
1899 {
1900 sprintf (type_name, "FLOAT_%d", precision);
1901 TYPE_NAME (t) = get_identifier (type_name);
1902 }
1903
1904 return t;
1905 }
1906
1907 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1908 an unsigned type; otherwise a signed type is returned. */
1909
1910 tree
1911 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1912 {
1913 if (mode == BLKmode)
1914 return NULL_TREE;
1915 else if (mode == VOIDmode)
1916 return void_type_node;
1917 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1918 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
1919 else
1920 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1921 }
1922
1923 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1924
1925 tree
1926 gnat_unsigned_type (tree type_node)
1927 {
1928 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1929
1930 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1931 {
1932 type = copy_node (type);
1933 TREE_TYPE (type) = type_node;
1934 }
1935 else if (TREE_TYPE (type_node) != 0
1936 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1937 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1938 {
1939 type = copy_node (type);
1940 TREE_TYPE (type) = TREE_TYPE (type_node);
1941 }
1942
1943 return type;
1944 }
1945
1946 /* Return the signed version of a TYPE_NODE, a scalar type. */
1947
1948 tree
1949 gnat_signed_type (tree type_node)
1950 {
1951 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
1952
1953 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1954 {
1955 type = copy_node (type);
1956 TREE_TYPE (type) = type_node;
1957 }
1958 else if (TREE_TYPE (type_node) != 0
1959 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1960 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1961 {
1962 type = copy_node (type);
1963 TREE_TYPE (type) = TREE_TYPE (type_node);
1964 }
1965
1966 return type;
1967 }
1968
1969 /* Return a type the same as TYPE except unsigned or signed according to
1970 UNSIGNEDP. */
1971
1972 tree
1973 gnat_signed_or_unsigned_type (int unsignedp, tree type)
1974 {
1975 if (! INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
1976 return type;
1977 else
1978 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
1979 }
1980 \f
1981 /* EXP is an expression for the size of an object. If this size contains
1982 discriminant references, replace them with the maximum (if MAX_P) or
1983 minimum (if ! MAX_P) possible value of the discriminant. */
1984
tree
max_size (tree exp, int max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  /* Dispatch on the tree code class: 'd' decls, 'c' constants, 'x'
     exceptional codes, 'r' references, '<' comparisons, '1'/'2'/'e'
     expressions.  */
  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Decls and constants contain no discriminant references.  */
      return exp;

    case 'x':
      /* Recurse over each element of a TREE_LIST.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (! CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* A discriminant reference: replace it with the extreme value of
	 the referenced field's type.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison yields 0 or 1, so those are its bounds.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation flips which extreme of the operand we want.  */
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  /* Only the second operand of a COMPOUND_EXPR is its value.  */
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Without knowing the condition, take the extreme of both
	       arms.  */
	    return fold (build (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    /* Rebuild the call with its argument list processed.  */
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p), NULL);
	}
    }

  /* Any tree code not handled above is unexpected here.  */
  gigi_abort (408);
}
2075 \f
2076 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2077 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2078 Return a constructor for the template. */
2079
tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding or left-justified-modular wrappers to reach the
     underlying array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* If the type carries its actual bounds, start from them.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.   Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* Template fields come in (min, max) pairs, hence the double
     TREE_CHAIN step; advance either the bound list or the array type
     in lock-step with the field pairs.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* Accumulate in reverse; the list is reversed again below.  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2138 \f
2139 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2140 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2141 in the type contains in its DECL_INITIAL the expression to use when
2142 a constructor is made for the type. GNAT_ENTITY is an entity used
2143 to print out an error message if the mechanism cannot be applied to
2144 an object of that type and also for the name. */
2145
tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  /* NOTE(review): if TYPE is not an array (ndim == 0), INNER_TYPE is
     never set; it is later read when mech == By_Descriptor_A.  Presumably
     that combination cannot occur — confirm against callers.  */
  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Fortran arrays store dimensions in reverse order, except for NCA
     descriptors.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      /* VAX floating-point types are selected by their decimal digits;
	 other integers by bit size and signedness.  */
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* POINTER is a 32-bit (SImode) pointer to the described object,
     evaluated when the constructor is built via a PLACEHOLDER_EXPR.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("POINTER",
		build_pointer_type_for_mode (type, SImode, false), record_type,
		build1 (ADDR_EXPR,
			build_pointer_type_for_mode (type, SImode, false),
			build (PLACEHOLDER_EXPR, type))));

  /* The remaining fields depend on the descriptor class.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* String-with-bounds: lower and upper bound of the (one) dimension.  */
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node),
		     NULL_TREE, NULL_TREE);

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0",
		    build_pointer_type_for_mode (inner_type, SImode, false),
		    record_type,
		    build1 (ADDR_EXPR,
			    build_pointer_type_for_mode (inner_type, SImode,
							 false),
			    tem)));

      /* Next come the addressing coefficients.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Element count of dimension I times the running product,
	     i.e. the stride for this dimension.  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  /* Only NCA descriptors accumulate the product across
	     dimensions.  */
	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  /* Lay out the record and give it a name derived from the entity.  */
  finish_record_type (record_type, field_list, 0, 1);
  create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
		    NULL, 1, 0, gnat_entity);

  return record_type;
}
2433
2434 /* Utility routine for above code to make a field. */
2435
2436 static tree
2437 make_descriptor_field (const char *name, tree type,
2438 tree rec_type, tree initial)
2439 {
2440 tree field
2441 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2442
2443 DECL_INITIAL (field) = initial;
2444 return field;
2445 }
2446 \f
2447 /* Build a type to be used to represent an aliased object whose nominal
2448 type is an unconstrained array. This consists of a RECORD_TYPE containing
2449 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2450 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2451 is used to represent an arbitrary unconstrained object. Use NAME
2452 as the name of the record. */
2453
2454 tree
2455 build_unc_object_type (tree template_type, tree object_type, tree name)
2456 {
2457 tree type = make_node (RECORD_TYPE);
2458 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2459 template_type, type, 0, 0, 0, 1);
2460 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2461 type, 0, 0, 0, 1);
2462
2463 TYPE_NAME (type) = name;
2464 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2465 finish_record_type (type,
2466 chainon (chainon (NULL_TREE, template_field),
2467 array_field),
2468 0, 0);
2469
2470 return type;
2471 }
2472 \f
2473 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2474 the normal case this is just two adjustments, but we have more to do
2475 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2476
void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree ptr1, ref1;
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (ptr == 0 && ref == 0)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well. Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it. After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed. We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
     qualifiers in thoses cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      /* Retarget every pointer type (and each of its variants) at the
	 new type, re-emitting debug info for named pointer types.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
	     ptr1 = TYPE_NEXT_VARIANT (ptr1))
	  {
	    TREE_TYPE (ptr1) = new_type;

	    if (TYPE_NAME (ptr1) != 0
		&& TREE_CODE (TYPE_NAME (ptr1)) == TYPE_DECL
		&& TREE_CODE (new_type) != ENUMERAL_TYPE)
	      rest_of_decl_compilation (TYPE_NAME (ptr1), NULL,
					global_bindings_p (), 0);
	  }

      /* Likewise for every reference type.  */
      for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
	for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
	     ref1 = TYPE_NEXT_VARIANT (ref1))
	  {
	    TREE_TYPE (ref1) = new_type;

	    if (TYPE_NAME (ref1) != 0
		&& TREE_CODE (TYPE_NAME (ref1)) == TYPE_DECL
		&& TREE_CODE (new_type) != ENUMERAL_TYPE)
	      rest_of_decl_compilation (TYPE_NAME (ref1), NULL,
					global_bindings_p (), 0);
	  }
    }

  /* Now deal with the unconstrained array case. In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context. */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Graft the new fat pointer's two fields onto the old fat-pointer
	 record, keeping its identity.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)), NULL_TREE);

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point each variant of the fat pointer back at the new
	 unconstrained array type.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record's size is the sum of its two fields' sizes.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2621 \f
2622 /* Convert a pointer to a constrained array into a pointer to a fat
2623 pointer. This involves making or finding a template. */
2624
static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* The template type is the type of the second fat-pointer field's
     pointed-to object.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Evaluate EXPR only once; if it is already an ADDR_EXPR, peel it
	 off rather than dereferencing.  */
      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The first field of the record is the template; the second is the
	 array data, whose address becomes the fat pointer's data part.  */
      template = build_component_ref (expr, NULL_TREE, fields, 0);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), 0));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type. In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary. This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2687 \f
2688 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2689 is something that is a fat pointer, so convert to it first if it EXPR
2690 is not already a fat pointer. */
2691
2692 static tree
2693 convert_to_thin_pointer (tree type, tree expr)
2694 {
2695 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2696 expr
2697 = convert_to_fat_pointer
2698 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2699
2700 /* We get the pointer to the data and use a NOP_EXPR to make it the
2701 proper GCC type. */
2702 expr
2703 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2704 expr = build1 (NOP_EXPR, type, expr);
2705
2706 return expr;
2707 }
2708 \f
2709 /* Create an expression whose value is that of EXPR,
2710 converted to type TYPE. The TREE_TYPE of the value
2711 is always TYPE. This function implements all reasonable
2712 conversions; callers should filter out those that are
2713 not permitted by the language being compiled. */
2714
tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (! TREE_CONSTANT (TYPE_SIZE (type))
	       || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    /* Deliberately empty: fall through to the generic code below, which
       ends up doing an unchecked conversion for this case.  */
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						   TYPE_FIELDS (etype), 0));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, 0);

      /* Otherwise wrap the converted inner object in a constructor for
	 the padded record.  */
      else
	return
	  gnat_build_constructor (type,
				  tree_cons (TYPE_FIELDS (type),
					     convert (TREE_TYPE
						      (TYPE_FIELDS (type)),
						      expr),
					     NULL_TREE));
    }

  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), 0));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template. */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	  && (TREE_CODE (expr) == STRING_CST
	      || get_alias_set (etype) == get_alias_set (type)))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, 0));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Strip an intermediate view-conversion between aggregate types and
	 retry from the underlying expression, except for fat pointers.  */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we're converting between two aggregate types that have the same main
     variant, just make a VIEW_CONVERT_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* In all other cases of related types, make a NOP_EXPR.  */
  /* NOTE(review): CODE and ECODE are the TREE_CODEs of *type* nodes
     (see the initializations at the top), so they can never be
     INTEGER_CST; the second clause below looks dead -- possibly
     INTEGER_TYPE was meant.  Confirm before changing.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
	   || (code == INTEGER_CST && ecode == INTEGER_CST
	       && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case BOOLEAN_TYPE:
      return fold (build1 (NOP_EXPR, type, gnat_truthvalue_conversion (expr)));

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, 0);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Subtract the bias so the stored value is relative to
	   TYPE_MIN_VALUE.  */
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, 0);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* For a left-justified modular target and a scalar source, wrap the
	 converted value in a one-field constructor.  */
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, 0);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3035 \f
3036 /* Remove all conversions that are done in EXP. This includes converting
3037 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3038 is nonzero, always return the address of the containing object even if
3039 the address is not bit-aligned. */
3040
3041 tree
3042 remove_conversions (tree exp, int true_address)
3043 {
3044 switch (TREE_CODE (exp))
3045 {
3046 case CONSTRUCTOR:
3047 if (true_address
3048 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3049 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3050 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3051 break;
3052
3053 case COMPONENT_REF:
3054 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3055 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3056 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3057 break;
3058
3059 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3060 case NOP_EXPR: case CONVERT_EXPR:
3061 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3062
3063 default:
3064 break;
3065 }
3066
3067 return exp;
3068 }
3069 \f
3070 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3071 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3072 likewise return an expression pointing to the underlying array. */
3073
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      /* For an actual dereference, fetch the array through the P_ARRAY
	 pointer of the fat pointer and dereference that instead.  */
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, 0));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Re-type the NULL_EXPR as the array type designated by the
	   P_ARRAY pointer field of the fat pointer.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): there is no break here, so any other expression of
	 unconstrained array type falls through into the RECORD_TYPE
	 handling below -- confirm this fall-through is intended.  */
    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	/* The array is the second field, after the template.  */
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}
3125 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is set, truncation operations should be suppressed.  */
3128
tree
unchecked_convert (tree type, tree expr, int notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* Strip a biased representation from the input first, via a copy of
	 the type with the bias flag cleared, so "convert" sees the
	 underlying value.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Likewise convert to an unbiased copy of a biased target, then
	 reinterpret the result as the biased type below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field. */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      expr = maybe_unconstrained_array (expr);

      /* There's no point in doing two unchecked conversions in a row.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
	expr = TREE_OPERAND (expr, 0);

      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (! notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Extend by shifting the value left then right (arithmetically for
	 signed types) by the number of unused high-order bits.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			    OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3289
3290 #include "gt-ada-utils.h"
3291 #include "gtype-ada.h"