ada-tree.def (TRANSFORM_EXPR, [...]): Deleted.
[gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41 #include "cgraph.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
45
46 #include "ada.h"
47 #include "types.h"
48 #include "atree.h"
49 #include "elists.h"
50 #include "namet.h"
51 #include "nlists.h"
52 #include "stringt.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
/* Largest size, in bits, for which a "fixed" (non-BLKmode) integer mode is
   used; default to the width of DImode if the target doesn't define it.  */
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  Indexed by gnat id minus
   First_Node_Id; allocated (cleared) in init_gnat_to_gnu.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

struct e_stack GTY((chain_next ("%h.next"))) {
  /* Enclosing (outer) stack entry.  */
  struct e_stack *next;
  /* Saved elaboration list for that outer context.  */
  tree elab_list;
};
static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct ada_binding_level GTY((chain_next ("%h.chain")))
{
  /* The binding level containing this one (the enclosing binding level).  */
  struct ada_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct ada_binding_level *current_binding_level;

/* A chain of ada_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct ada_binding_level *free_binding_level;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* Per-function language-specific data; no Ada-specific per-function state
   is needed, hence the placeholder member.  */
struct language_function GTY(())
{
  int unused;
};

/* Forward declarations for the static helpers defined later in this file.  */
static void gnat_define_builtin (const char *, tree, int, const char *, bool);
static void gnat_install_builtins (void);
static tree merge_sizes (tree, tree, tree, int, int);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static int value_zerop (tree);
static void gnat_gimplify_function (tree);
static void gnat_finalize (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *,tree, tree, tree);
static int value_factor_p (tree, int);
static int potential_alignment_gap (tree, tree, tree);
150 \f
151 /* Initialize the association of GNAT nodes to GCC trees. */
152
153 void
154 init_gnat_to_gnu (void)
155 {
156 associate_gnat_to_gnu
157 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
158
159 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
160 }
161
162 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
163 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
164 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
165
166 If GNU_DECL is zero, a previous association is to be reset. */
167
168 void
169 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
170 {
171 /* Check that GNAT_ENTITY is not already defined and that it is being set
172 to something which is a decl. Raise gigi 401 if not. Usually, this
173 means GNAT_ENTITY is defined twice, but occasionally is due to some
174 Gigi problem. */
175 if (gnu_decl
176 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
177 || (! no_check && ! DECL_P (gnu_decl))))
178 gigi_abort (401);
179
180 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
181 }
182
183 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
184 Return the ..._DECL node that was associated with it. If there is no tree
185 node associated with GNAT_ENTITY, abort.
186
187 In some cases, such as delayed elaboration or expressions that need to
188 be elaborated only once, GNAT_ENTITY is really not an entity. */
189
190 tree
191 get_gnu_tree (Entity_Id gnat_entity)
192 {
193 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
194 gigi_abort (402);
195
196 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
197 }
198
199 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
200
201 int
202 present_gnu_tree (Entity_Id gnat_entity)
203 {
204 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
205 }
206
207 \f
208 /* Return non-zero if we are currently in the global binding level. */
209
210 int
211 global_bindings_p (void)
212 {
213 return (force_global != 0 || current_binding_level->chain == 0 ? -1 : 0);
214 }
215
216 /* Return the list of declarations in the current level. Note that this list
217 is in reverse order (it has to be so for back-end compatibility). */
218
219 tree
220 getdecls (void)
221 {
222 return BLOCK_VARS (current_binding_level->block);
223 }
224
225 /* Enter a new binding level. */
226
227 void
228 gnat_pushlevel ()
229 {
230 struct ada_binding_level *newlevel = NULL;
231
232 /* Reuse a struct for this binding level, if there is one. */
233 if (free_binding_level)
234 {
235 newlevel = free_binding_level;
236 free_binding_level = free_binding_level->chain;
237 }
238 else
239 newlevel
240 = (struct ada_binding_level *)
241 ggc_alloc (sizeof (struct ada_binding_level));
242
243 /* Use a free BLOCK, if any; otherwise, allocate one. */
244 if (free_block_chain)
245 {
246 newlevel->block = free_block_chain;
247 free_block_chain = TREE_CHAIN (free_block_chain);
248 TREE_CHAIN (newlevel->block) = NULL_TREE;
249 }
250 else
251 newlevel->block = make_node (BLOCK);
252
253 /* Point the BLOCK we just made to its parent. */
254 if (current_binding_level)
255 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
256
257 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
258
259 /* Add this level to the front of the chain (stack) of levels that are
260 active. */
261 newlevel->chain = current_binding_level;
262 newlevel->jmpbuf_decl = NULL_TREE;
263 current_binding_level = newlevel;
264 }
265
/* Set the jmpbuf_decl for the current binding level to DECL, the setjmp
   buffer that must be updated for variable-sized definitions made within
   this context.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}
273
274 /* Get the jmpbuf_decl, if any, for the current binding level. */
275
276 tree
277 get_block_jmpbuf_decl ()
278 {
279 return current_binding_level->jmpbuf_decl;
280 }
281
282 /* Exit a binding level. Set any BLOCK into the current code group. */
283
284 void
285 gnat_poplevel ()
286 {
287 struct ada_binding_level *level = current_binding_level;
288 tree block = level->block;
289
290 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
291 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
292
293 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
294 are no variables free the block and merge its subblocks into those of its
295 parent block. Otherwise, add it to the list of its parent. */
296 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
297 ;
298 else if (BLOCK_VARS (block) == NULL_TREE)
299 {
300 BLOCK_SUBBLOCKS (level->chain->block)
301 = chainon (BLOCK_SUBBLOCKS (block),
302 BLOCK_SUBBLOCKS (level->chain->block));
303 TREE_CHAIN (block) = free_block_chain;
304 free_block_chain = block;
305 }
306 else
307 {
308 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
309 BLOCK_SUBBLOCKS (level->chain->block) = block;
310 TREE_USED (block) = 1;
311 set_block_for_group (block);
312 }
313
314 /* Free this binding structure. */
315 current_binding_level = level->chain;
316 level->chain = free_binding_level;
317 free_binding_level = level;
318 }
319
320 /* Insert BLOCK at the end of the list of subblocks of the
321 current binding level. This is used when a BIND_EXPR is expanded,
322 to handle the BLOCK node inside the BIND_EXPR. */
323
324 void
325 insert_block (tree block)
326 {
327 TREE_USED (block) = 1;
328 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
329 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
330 }
331
332 /* Return nonzero if the current binding has any variables. This means
333 it will have a BLOCK node. */
334
335 int
336 block_has_vars ()
337 {
338 return BLOCK_VARS (current_binding_level->block) != 0;
339 }
340 \f
/* Records a ..._DECL node DECL as belonging to the current lexical scope.
   Returns the ..._DECL node.  */

tree
pushdecl (tree decl)
{
  /* If at top level, there is no context.  But PARM_DECLs always go in the
     level of its function.  */
  if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
    DECL_CONTEXT (decl) = 0;
  else
    DECL_CONTEXT (decl) = current_function_decl;

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later (by nreverse in gnat_poplevel).

     Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list.  They
     will cause trouble with the debugger and aren't needed anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      /* Prepend to the current level's BLOCK_VARS chain.  */
      TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
      BLOCK_VARS (current_binding_level->block) = decl;
    }

  /* For the declaration of a type, set its name if it either is not already
     set, was set to an IDENTIFIER_NODE, indicating an internal name,
     or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in this
     function in c-decl.c makes a copy of the type node here, but that may
     cause us trouble with incomplete types, so let's not try it (at least
     for now).  */

  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_NAME (decl) != 0
      && (TYPE_NAME (TREE_TYPE (decl)) == 0
	  || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
	  || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
	      && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
	      && ! DECL_ARTIFICIAL (decl))))
    TYPE_NAME (TREE_TYPE (decl)) = decl;

  return decl;
}
386 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  The order below is significant: the global
   binding level must exist before any pushdecl, and sizetype must be set
   before the common tree nodes that depend on it are built.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  gnat_pushlevel ();

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the size of Pmode.  In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype.  But we get
     far better code using the width of Pmode.  Make this here since we need
     this before we can expand the GNAT types.  */
  size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
  set_sizetype (size_type_node);
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
			integer_type_node));
  /* NOTE(review): char_type_node is deliberately declared under the name
     "unsigned char" here -- presumably for debug output; confirm.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
			char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

  /* Register the builtin functions the middle-end expects to find.  */
  gnat_install_builtins ();
}
425
/* Define a builtin function.  This is temporary and is just being done
   to initialize implicit_built_in_decls for the middle-end.  We'll want
   to do full builtin processing soon.

   NAME is the source name of the function and TYPE its FUNCTION_TYPE;
   FUNCTION_CODE is the BUILT_IN_* code.  LIBRARY_NAME, if non-null, is
   used as the assembler name instead of NAME.  CONST_P nonzero marks the
   function read-only (no side effects beyond its return value).  */

static void
gnat_define_builtin (const char *name, tree type,
		     int function_code, const char *library_name, bool const_p)
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  if (library_name)
    SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
  make_decl_rtl (decl, NULL);
  pushdecl (decl);
  DECL_BUILT_IN_CLASS (decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (decl) = function_code;
  TREE_READONLY (decl) = const_p;

  /* Record the decl so the middle-end can generate implicit calls to it.  */
  implicit_built_in_decls[function_code] = decl;
}
448
449 /* Install the builtin functions the middle-end needs. */
450
451 static void
452 gnat_install_builtins ()
453 {
454 tree ftype;
455 tree tmp;
456
457 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
458 tmp = tree_cons (NULL_TREE, long_integer_type_node, tmp);
459 ftype = build_function_type (long_integer_type_node, tmp);
460 gnat_define_builtin ("__builtin_expect", ftype, BUILT_IN_EXPECT,
461 "__builtin_expect", true);
462
463 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
464 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
465 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
466 ftype = build_function_type (ptr_void_type_node, tmp);
467 gnat_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
468 "memcpy", false);
469
470 tmp = tree_cons (NULL_TREE, integer_type_node, void_list_node);
471 ftype = build_function_type (integer_type_node, tmp);
472 gnat_define_builtin ("__builtin_clz", ftype, BUILT_IN_CLZ, "clz", true);
473
474 tmp = tree_cons (NULL_TREE, long_integer_type_node, void_list_node);
475 ftype = build_function_type (integer_type_node, tmp);
476 gnat_define_builtin ("__builtin_clzl", ftype, BUILT_IN_CLZL, "clzl", true);
477
478 tmp = tree_cons (NULL_TREE, long_long_integer_type_node, void_list_node);
479 ftype = build_function_type (integer_type_node, tmp);
480 gnat_define_builtin ("__builtin_clzll", ftype, BUILT_IN_CLZLL, "clzll",
481 true);
482
483 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
484 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
485 tmp = tree_cons (NULL_TREE, ptr_void_type_node, tmp);
486 ftype = build_function_type (void_type_node, tmp);
487 gnat_define_builtin ("__builtin_init_trampoline", ftype,
488 BUILT_IN_INIT_TRAMPOLINE, "init_trampoline", false);
489
490 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
491 ftype = build_function_type (ptr_void_type_node, tmp);
492 gnat_define_builtin ("__builtin_adjust_trampoline", ftype,
493 BUILT_IN_ADJUST_TRAMPOLINE, "adjust_trampoline", true);
494
495 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
496 tmp = tree_cons (NULL_TREE, size_type_node, void_list_node);
497 ftype = build_function_type (ptr_void_type_node, tmp);
498 gnat_define_builtin ("__builtin_stack_alloc", ftype, BUILT_IN_STACK_ALLOC,
499 "stack_alloc", false);
500
501 /* The stack_save and stack_restore builtins aren't used directly. They
502 are inserted during gimplification to implement stack_alloc calls. */
503 ftype = build_function_type (ptr_void_type_node, void_list_node);
504 gnat_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
505 "stack_save", false);
506 tmp = tree_cons (NULL_TREE, ptr_void_type_node, void_list_node);
507 ftype = build_function_type (void_type_node, tmp);
508 gnat_define_builtin ("__builtin_stack_restore", ftype,
509 BUILT_IN_STACK_RESTORE, "stack_restore", false);
510 }
511
512
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.

   LONG_LONG_FLOAT_TYPE is the front end's longest float type and
   EXCEPTION_TYPE its exception type; both arrive as GCC trees.  This also
   declares the runtime support subprograms (allocation, exception
   handling, setjmp/longjmp) used throughout Gigi.  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end.  We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
	 so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
			    longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
			unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
			    void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions.  ENDLINK terminates every fixed
     argument list built below.  */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
				     NULL_TREE,
				     build_function_type (ptr_void_type_node,
							  tree_cons (NULL_TREE,
								     sizetype,
								     endlink)),
				     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  The
     jmpbuf is a 6-element array (index type 0 .. 5) of Pmode-sized
     integers.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
			build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
     NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
     NULL_TREE,
     build_function_type (void_type_node,
			  tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
     NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
    (get_identifier ("system__soft_links__get_gnat_exception"),
     NULL_TREE,
     build_function_type (build_pointer_type (except_type_node), NULL_TREE),
     NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions.  */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE,
				       build_pointer_type (except_type_node),
				       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
			   build_function_type (void_type_node,
						tree_cons (NULL_TREE,
							   ptr_void_type_node,
							   endlink)),
			   NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler.  No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
	= create_subprog_decl
	  (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
	   build_function_type (void_type_node,
				tree_cons (NULL_TREE,
					   build_pointer_type (char_type_node),
					   tree_cons (NULL_TREE,
						      integer_type_node,
						      endlink))),
	   NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
	gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
	/* 14 chars of prefix + 2 digits + NUL == 17; assumes the reason
	   codes stay below 100 -- the %.2d would widen otherwise.  */
	char name[17];

	sprintf (name, "__gnat_rcheck_%.2d", i);
	gnat_raise_decls[i]
	  = create_subprog_decl
	    (get_identifier (name), NULL_TREE,
	     build_function_type (void_type_node,
				  tree_cons (NULL_TREE,
					     build_pointer_type
					     (char_type_node),
					     tree_cons (NULL_TREE,
							integer_type_node,
							endlink))),
	     NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
			    TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
	= build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
				TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
     address.  */
  update_setjmp_buf_decl
    = create_subprog_decl
      (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
       build_function_type (void_type_node,
			    tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;

  main_identifier_node = get_identifier ("main");
}
711 \f
712 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
713 nodes (FIELDLIST), finish constructing the record or union type.
714 If HAS_REP is nonzero, this record has a rep clause; don't call
715 layout_type but merely set the size and alignment ourselves.
716 If DEFER_DEBUG is nonzero, do not call the debugging routines
717 on this type; it will be done later. */
718
719 void
720 finish_record_type (tree record_type,
721 tree fieldlist,
722 int has_rep,
723 int defer_debug)
724 {
725 enum tree_code code = TREE_CODE (record_type);
726 tree ada_size = bitsize_zero_node;
727 tree size = bitsize_zero_node;
728 tree size_unit = size_zero_node;
729 int var_size = 0;
730 tree field;
731
732 TYPE_FIELDS (record_type) = fieldlist;
733
734 if (TYPE_NAME (record_type) != 0
735 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
736 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
737 else
738 TYPE_STUB_DECL (record_type)
739 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
740 record_type));
741
742 /* We don't need both the typedef name and the record name output in
743 the debugging information, since they are the same. */
744 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
745
746 /* Globally initialize the record first. If this is a rep'ed record,
747 that just means some initializations; otherwise, layout the record. */
748
749 if (has_rep)
750 {
751 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
752 TYPE_MODE (record_type) = BLKmode;
753 if (TYPE_SIZE (record_type) == 0)
754 {
755 TYPE_SIZE (record_type) = bitsize_zero_node;
756 TYPE_SIZE_UNIT (record_type) = size_zero_node;
757 }
758 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
759 out just like a UNION_TYPE, since the size will be fixed. */
760 else if (code == QUAL_UNION_TYPE)
761 code = UNION_TYPE;
762 }
763 else
764 {
765 /* Ensure there isn't a size already set. There can be in an error
766 case where there is a rep clause but all fields have errors and
767 no longer have a position. */
768 TYPE_SIZE (record_type) = 0;
769 layout_type (record_type);
770 }
771
772 /* At this point, the position and size of each field is known. It was
773 either set before entry by a rep clause, or by laying out the type above.
774
775 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
776 to compute the Ada size; the GCC size and alignment (for rep'ed records
777 that are not padding types); and the mode (for rep'ed records). We also
778 clear the DECL_BIT_FIELD indication for the cases we know have not been
779 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
780
781 if (code == QUAL_UNION_TYPE)
782 fieldlist = nreverse (fieldlist);
783
784 for (field = fieldlist; field; field = TREE_CHAIN (field))
785 {
786 tree pos = bit_position (field);
787
788 tree type = TREE_TYPE (field);
789 tree this_size = DECL_SIZE (field);
790 tree this_size_unit = DECL_SIZE_UNIT (field);
791 tree this_ada_size = DECL_SIZE (field);
792
793 /* We need to make an XVE/XVU record if any field has variable size,
794 whether or not the record does. For example, if we have an union,
795 it may be that all fields, rounded up to the alignment, have the
796 same size, in which case we'll use that size. But the debug
797 output routines (except Dwarf2) won't be able to output the fields,
798 so we need to make the special record. */
799 if (TREE_CODE (this_size) != INTEGER_CST)
800 var_size = 1;
801
802 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
803 || TREE_CODE (type) == QUAL_UNION_TYPE)
804 && ! TYPE_IS_FAT_POINTER_P (type)
805 && ! TYPE_CONTAINS_TEMPLATE_P (type)
806 && TYPE_ADA_SIZE (type) != 0)
807 this_ada_size = TYPE_ADA_SIZE (type);
808
809 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
810 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
811 && value_factor_p (pos, BITS_PER_UNIT)
812 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
813 DECL_BIT_FIELD (field) = 0;
814
815 /* If we still have DECL_BIT_FIELD set at this point, we know the field
816 is technically not addressable. Except that it can actually be
817 addressed if the field is BLKmode and happens to be properly
818 aligned. */
819 DECL_NONADDRESSABLE_P (field)
820 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
821
822 if (has_rep && ! DECL_BIT_FIELD (field))
823 TYPE_ALIGN (record_type)
824 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
825
826 switch (code)
827 {
828 case UNION_TYPE:
829 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
830 size = size_binop (MAX_EXPR, size, this_size);
831 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
832 break;
833
834 case QUAL_UNION_TYPE:
835 ada_size
836 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
837 this_ada_size, ada_size));
838 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
839 this_size, size));
840 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
841 this_size_unit, size_unit));
842 break;
843
844 case RECORD_TYPE:
845 /* Since we know here that all fields are sorted in order of
846 increasing bit position, the size of the record is one
847 higher than the ending bit of the last field processed
848 unless we have a rep clause, since in that case we might
849 have a field outside a QUAL_UNION_TYPE that has a higher ending
850 position. So use a MAX in that case. Also, if this field is a
851 QUAL_UNION_TYPE, we need to take into account the previous size in
852 the case of empty variants. */
853 ada_size
854 = merge_sizes (ada_size, pos, this_ada_size,
855 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
856 size = merge_sizes (size, pos, this_size,
857 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
858 size_unit
859 = merge_sizes (size_unit, byte_position (field), this_size_unit,
860 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
861 break;
862
863 default:
864 abort ();
865 }
866 }
867
868 if (code == QUAL_UNION_TYPE)
869 nreverse (fieldlist);
870
871 /* If this is a padding record, we never want to make the size smaller than
872 what was specified in it, if any. */
873 if (TREE_CODE (record_type) == RECORD_TYPE
874 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
875 {
876 size = TYPE_SIZE (record_type);
877 size_unit = TYPE_SIZE_UNIT (record_type);
878 }
879
880 /* Now set any of the values we've just computed that apply. */
881 if (! TYPE_IS_FAT_POINTER_P (record_type)
882 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
883 SET_TYPE_ADA_SIZE (record_type, ada_size);
884
885 if (has_rep)
886 {
887 if (! (TREE_CODE (record_type) == RECORD_TYPE
888 && TYPE_IS_PADDING_P (record_type)
889 && CONTAINS_PLACEHOLDER_P (size)))
890 {
891 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
892 TYPE_SIZE_UNIT (record_type)
893 = round_up (size_unit,
894 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
895 }
896
897 compute_record_mode (record_type);
898 }
899
900 if (! defer_debug)
901 {
902 /* If this record is of variable size, rename it so that the
903 debugger knows it is and make a new, parallel, record
904 that tells the debugger how the record is laid out. See
905 exp_dbug.ads. But don't do this for records that are padding
906 since they confuse GDB. */
907 if (var_size
908 && ! (TREE_CODE (record_type) == RECORD_TYPE
909 && TYPE_IS_PADDING_P (record_type)))
910 {
911 tree new_record_type
912 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
913 ? UNION_TYPE : TREE_CODE (record_type));
914 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
915 tree new_id
916 = concat_id_with_name (orig_id,
917 TREE_CODE (record_type) == QUAL_UNION_TYPE
918 ? "XVU" : "XVE");
919 tree last_pos = bitsize_zero_node;
920 tree old_field;
921 tree prev_old_field = 0;
922
923 TYPE_NAME (new_record_type) = new_id;
924 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
925 TYPE_STUB_DECL (new_record_type)
926 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
927 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
928 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
929 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
930 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
931
932 /* Now scan all the fields, replacing each field with a new
933 field corresponding to the new encoding. */
934 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
935 old_field = TREE_CHAIN (old_field))
936 {
937 tree field_type = TREE_TYPE (old_field);
938 tree field_name = DECL_NAME (old_field);
939 tree new_field;
940 tree curpos = bit_position (old_field);
941 int var = 0;
942 unsigned int align = 0;
943 tree pos;
944
945 /* See how the position was modified from the last position.
946
947 There are two basic cases we support: a value was added
948 to the last position or the last position was rounded to
949 a boundary and they something was added. Check for the
950 first case first. If not, see if there is any evidence
951 of rounding. If so, round the last position and try
952 again.
953
954 If this is a union, the position can be taken as zero. */
955
956 if (TREE_CODE (new_record_type) == UNION_TYPE)
957 pos = bitsize_zero_node, align = 0;
958 else
959 pos = compute_related_constant (curpos, last_pos);
960
961 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
962 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
963 {
964 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
965 pos = compute_related_constant (curpos,
966 round_up (last_pos, align));
967 }
968 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
969 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
970 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
971 && host_integerp (TREE_OPERAND
972 (TREE_OPERAND (curpos, 0), 1),
973 1))
974 {
975 align
976 = tree_low_cst
977 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
978 pos = compute_related_constant (curpos,
979 round_up (last_pos, align));
980 }
981 else if (potential_alignment_gap (prev_old_field, old_field,
982 pos))
983 {
984 align = TYPE_ALIGN (field_type);
985 pos = compute_related_constant (curpos,
986 round_up (last_pos, align));
987 }
988
989 /* If we can't compute a position, set it to zero.
990
991 ??? We really should abort here, but it's too much work
992 to get this correct for all cases. */
993
994 if (pos == 0)
995 pos = bitsize_zero_node;
996
997 /* See if this type is variable-size and make a new type
998 and indicate the indirection if so. */
999 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1000 {
1001 field_type = build_pointer_type (field_type);
1002 var = 1;
1003 }
1004
1005 /* Make a new field name, if necessary. */
1006 if (var || align != 0)
1007 {
1008 char suffix[6];
1009
1010 if (align != 0)
1011 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1012 align / BITS_PER_UNIT);
1013 else
1014 strcpy (suffix, "XVL");
1015
1016 field_name = concat_id_with_name (field_name, suffix);
1017 }
1018
1019 new_field = create_field_decl (field_name, field_type,
1020 new_record_type, 0,
1021 DECL_SIZE (old_field), pos, 0);
1022 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1023 TYPE_FIELDS (new_record_type) = new_field;
1024
1025 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1026 zero. The only time it's not the last field of the record
1027 is when there are other components at fixed positions after
1028 it (meaning there was a rep clause for every field) and we
1029 want to be able to encode them. */
1030 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1031 (TREE_CODE (TREE_TYPE (old_field))
1032 == QUAL_UNION_TYPE)
1033 ? bitsize_zero_node
1034 : DECL_SIZE (old_field));
1035 prev_old_field = old_field;
1036 }
1037
1038 TYPE_FIELDS (new_record_type)
1039 = nreverse (TYPE_FIELDS (new_record_type));
1040
1041 rest_of_type_compilation (new_record_type, global_bindings_p ());
1042 }
1043
1044 rest_of_type_compilation (record_type, global_bindings_p ());
1045 }
1046 }
1047
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size,
             tree first_bit,
             tree size,
             int special,
             int has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  /* Normal case: the field's contribution ends at FIRST_BIT + SIZE.  With
     a rep clause, fields may appear in any order, so the record size is
     the MAX of that end position and the size computed so far.  */
  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  /* QUAL_UNION_TYPE case: SIZE is a COND_EXPR, so merge each arm
     recursively.  An arm of zero denotes an empty variant, for which we
     keep the previous size instead.  */
  else
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1093
1094 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1095 related by the addition of a constant. Return that constant if so. */
1096
1097 static tree
1098 compute_related_constant (tree op0, tree op1)
1099 {
1100 tree op0_var, op1_var;
1101 tree op0_con = split_plus (op0, &op0_var);
1102 tree op1_con = split_plus (op1, &op1_var);
1103 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1104
1105 if (operand_equal_p (op0_var, op1_var, 0))
1106 return result;
1107 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1108 return result;
1109 else
1110 return 0;
1111 }
1112
1113 /* Utility function of above to split a tree OP which may be a sum, into a
1114 constant part, which is returned, and a variable part, which is stored
1115 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1116 bitsizetype. */
1117
1118 static tree
1119 split_plus (tree in, tree *pvar)
1120 {
1121 /* Strip NOPS in order to ease the tree traversal and maximize the
1122 potential for constant or plus/minus discovery. We need to be careful
1123 to always return and set *pvar to bitsizetype trees, but it's worth
1124 the effort. */
1125 STRIP_NOPS (in);
1126
1127 *pvar = convert (bitsizetype, in);
1128
1129 if (TREE_CODE (in) == INTEGER_CST)
1130 {
1131 *pvar = bitsize_zero_node;
1132 return convert (bitsizetype, in);
1133 }
1134 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1135 {
1136 tree lhs_var, rhs_var;
1137 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1138 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1139
1140 if (lhs_var == TREE_OPERAND (in, 0)
1141 && rhs_var == TREE_OPERAND (in, 1))
1142 return bitsize_zero_node;
1143
1144 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1145 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1146 }
1147 else
1148 return bitsize_zero_node;
1149 }
1150 \f
1151 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1152 subprogram. If it is void_type_node, then we are dealing with a procedure,
1153 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1154 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1155 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1156 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1157 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1158 RETURNS_WITH_DSP is nonzero if the function is to return with a
1159 depressed stack pointer. */
1160
1161 tree
1162 create_subprog_type (tree return_type,
1163 tree param_decl_list,
1164 tree cico_list,
1165 int returns_unconstrained,
1166 int returns_by_ref,
1167 int returns_with_dsp)
1168 {
1169 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1170 the subprogram formal parameters. This list is generated by traversing the
1171 input list of PARM_DECL nodes. */
1172 tree param_type_list = NULL;
1173 tree param_decl;
1174 tree type;
1175
1176 for (param_decl = param_decl_list; param_decl;
1177 param_decl = TREE_CHAIN (param_decl))
1178 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1179 param_type_list);
1180
1181 /* The list of the function parameter types has to be terminated by the void
1182 type to signal to the back-end that we are not dealing with a variable
1183 parameter subprogram, but that the subprogram has a fixed number of
1184 parameters. */
1185 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1186
1187 /* The list of argument types has been created in reverse
1188 so nreverse it. */
1189 param_type_list = nreverse (param_type_list);
1190
1191 type = build_function_type (return_type, param_type_list);
1192
1193 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1194 or the new type should, make a copy of TYPE. Likewise for
1195 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1196 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1197 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1198 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1199 type = copy_type (type);
1200
1201 SET_TYPE_CI_CO_LIST (type, cico_list);
1202 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1203 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1204 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1205 return type;
1206 }
1207 \f
1208 /* Return a copy of TYPE but safe to modify in any way. */
1209
1210 tree
1211 copy_type (tree type)
1212 {
1213 tree new = copy_node (type);
1214
1215 /* copy_node clears this field instead of copying it, because it is
1216 aliased with TREE_CHAIN. */
1217 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1218
1219 TYPE_POINTER_TO (new) = 0;
1220 TYPE_REFERENCE_TO (new) = 0;
1221 TYPE_MAIN_VARIANT (new) = new;
1222 TYPE_NEXT_VARIANT (new) = 0;
1223
1224 return new;
1225 }
1226 \f
1227 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1228 TYPE_INDEX_TYPE is INDEX. */
1229
1230 tree
1231 create_index_type (tree min, tree max, tree index)
1232 {
1233 /* First build a type for the desired range. */
1234 tree type = build_index_2_type (min, max);
1235
1236 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1237 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1238 is set, but not to INDEX, make a copy of this type with the requested
1239 index type. Note that we have no way of sharing these types, but that's
1240 only a small hole. */
1241 if (TYPE_INDEX_TYPE (type) == index)
1242 return type;
1243 else if (TYPE_INDEX_TYPE (type) != 0)
1244 type = copy_type (type);
1245
1246 SET_TYPE_INDEX_TYPE (type, index);
1247 return type;
1248 }
1249 \f
1250 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1251 string) and TYPE is a ..._TYPE node giving its data type.
1252 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1253 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1254 information about this type. */
1255
1256 tree
1257 create_type_decl (tree type_name,
1258 tree type,
1259 struct attrib *attr_list,
1260 int artificial_p,
1261 int debug_info_p)
1262 {
1263 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1264 enum tree_code code = TREE_CODE (type);
1265
1266 DECL_ARTIFICIAL (type_decl) = artificial_p;
1267 pushdecl (type_decl);
1268 process_attributes (type_decl, attr_list);
1269
1270 /* Pass type declaration information to the debugger unless this is an
1271 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1272 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1273 a dummy type, which will be completed later, or a type for which
1274 debugging information was not requested. */
1275 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1276 || ! debug_info_p)
1277 DECL_IGNORED_P (type_decl) = 1;
1278 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1279 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1280 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1281 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1282
1283 return type_decl;
1284 }
1285
1286 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1287 ASM_NAME is its assembler name (if provided). TYPE is its data type
1288 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1289 expression; NULL_TREE if none.
1290
1291 CONST_FLAG is nonzero if this variable is constant.
1292
1293 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1294 the current compilation unit. This flag should be set when processing the
1295 variable definitions in a package specification. EXTERN_FLAG is nonzero
1296 when processing an external variable declaration (as opposed to a
1297 definition: no storage is to be allocated for the variable here).
1298
1299 STATIC_FLAG is only relevant when not at top level. In that case
1300 it indicates whether to always allocate storage to the variable. */
1301
tree
create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
                 int const_flag, int public_flag, int extern_flag,
                 int static_flag, struct attrib *attr_list)
{
  /* Whether the initializer counts as constant: for a global or static
     object it must be valid as a static initializer; otherwise being
     TREE_CONSTANT is enough.  The types must agree in either case.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small"  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.
     If we are defining a global here, leave a constant initialization and
     save any variable elaborations for the elaboration routine.  If we are
     just annotating types, throw away the initialization if it isn't a
     constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* A non-constant initializer for a global must be deferred to the
     elaboration routine.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  /* Volatile objects have side effects on every access.  */
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external. If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* A CONST_DECL has no storage of its own, so nothing to emit for it.  */
  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  return var_decl;
}
1372 \f
1373 /* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1374 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1375 this field is in a record type with a "pragma pack". If SIZE is nonzero
1376 it is the specified size for this field. If POS is nonzero, it is the bit
1377 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1378 the address of this field for aliasing purposes. */
1379
tree
create_field_decl (tree field_name,
		   tree field_type,
		   tree record_type,
		   int packed,
		   tree size,
		   tree pos,
		   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && ! addressable
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  /* A field with a rep clause (POS != 0) is packed exactly when it was made
     a bitfield above; otherwise it inherits the record's packedness.  */
  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      /* Remember that this field was given an explicit position.  */
      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = ! addressable;

  return field_decl;
}
1510
1511 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1512 effects, has the value of zero. */
1513
1514 static int
1515 value_zerop (tree exp)
1516 {
1517 if (TREE_CODE (exp) == COMPOUND_EXPR)
1518 return value_zerop (TREE_OPERAND (exp, 1));
1519
1520 return integer_zerop (exp);
1521 }
1522 \f
1523 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1524 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1525 readonly (either an IN parameter or an address of a pass-by-ref
1526 parameter). */
1527
1528 tree
1529 create_param_decl (tree param_name, tree param_type, int readonly)
1530 {
1531 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1532
1533 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1534 lead to various ABI violations. */
1535 if (targetm.calls.promote_prototypes (param_type)
1536 && (TREE_CODE (param_type) == INTEGER_TYPE
1537 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1538 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1539 {
1540 /* We have to be careful about biased types here. Make a subtype
1541 of integer_type_node with the proper biasing. */
1542 if (TREE_CODE (param_type) == INTEGER_TYPE
1543 && TYPE_BIASED_REPRESENTATION_P (param_type))
1544 {
1545 param_type
1546 = copy_type (build_range_type (integer_type_node,
1547 TYPE_MIN_VALUE (param_type),
1548 TYPE_MAX_VALUE (param_type)));
1549
1550 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1551 }
1552 else
1553 param_type = integer_type_node;
1554 }
1555
1556 DECL_ARG_TYPE (param_decl) = param_type;
1557 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1558 TREE_READONLY (param_decl) = readonly;
1559 return param_decl;
1560 }
1561 \f
1562 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1563
1564 void
1565 process_attributes (tree decl, struct attrib *attr_list)
1566 {
1567 for (; attr_list; attr_list = attr_list->next)
1568 switch (attr_list->type)
1569 {
1570 case ATTR_MACHINE_ATTRIBUTE:
1571 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1572 NULL_TREE),
1573 ATTR_FLAG_TYPE_IN_PLACE);
1574 break;
1575
1576 case ATTR_LINK_ALIAS:
1577 TREE_STATIC (decl) = 1;
1578 assemble_alias (decl, attr_list->name);
1579 break;
1580
1581 case ATTR_WEAK_EXTERNAL:
1582 if (SUPPORTS_WEAK)
1583 declare_weak (decl);
1584 else
1585 post_error ("?weak declarations not supported on this target",
1586 attr_list->error_point);
1587 break;
1588
1589 case ATTR_LINK_SECTION:
1590 if (targetm.have_named_sections)
1591 {
1592 DECL_SECTION_NAME (decl)
1593 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1594 IDENTIFIER_POINTER (attr_list->name));
1595 }
1596 else
1597 post_error ("?section attributes are not supported for this target",
1598 attr_list->error_point);
1599 break;
1600 }
1601 }
1602 \f
1603 /* Add some pending elaborations on the list. */
1604
1605 void
1606 add_pending_elaborations (tree var_decl, tree var_init)
1607 {
1608 if (var_init != 0)
1609 Check_Elaboration_Code_Allowed (error_gnat_node);
1610
1611 pending_elaborations
1612 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1613 }
1614
1615 /* Obtain any pending elaborations and clear the old list. */
1616
1617 tree
1618 get_pending_elaborations (void)
1619 {
1620 /* Each thing added to the list went on the end; we want it on the
1621 beginning. */
1622 tree result = TREE_CHAIN (pending_elaborations);
1623
1624 TREE_CHAIN (pending_elaborations) = 0;
1625 return result;
1626 }
1627
1628 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1629 of 2. */
1630
1631 static int
1632 value_factor_p (tree value, int factor)
1633 {
1634 if (host_integerp (value, 1))
1635 return tree_low_cst (value, 1) % factor == 0;
1636
1637 if (TREE_CODE (value) == MULT_EXPR)
1638 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1639 || value_factor_p (TREE_OPERAND (value, 1), factor));
1640
1641 return 0;
1642 }
1643
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static int
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return 0;

  /* If the previous field is a union type, then return False: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return 0;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return (!integer_zerop (offset));

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return 0;

  /* Fallback: we cannot prove anything, so report a potential gap.  */
  return 1;
}
1689
1690 /* Return nonzero if there are pending elaborations. */
1691
1692 int
1693 pending_elaborations_p (void)
1694 {
1695 return TREE_CHAIN (pending_elaborations) != 0;
1696 }
1697
1698 /* Save a copy of the current pending elaboration list and make a new
1699 one. */
1700
1701 void
1702 push_pending_elaborations (void)
1703 {
1704 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1705
1706 p->next = elist_stack;
1707 p->elab_list = pending_elaborations;
1708 elist_stack = p;
1709 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1710 }
1711
1712 /* Pop the stack of pending elaborations. */
1713
1714 void
1715 pop_pending_elaborations (void)
1716 {
1717 struct e_stack *p = elist_stack;
1718
1719 pending_elaborations = p->elab_list;
1720 elist_stack = p->next;
1721 }
1722
1723 /* Return the current position in pending_elaborations so we can insert
1724 elaborations after that point. */
1725
1726 tree
1727 get_elaboration_location (void)
1728 {
1729 return tree_last (pending_elaborations);
1730 }
1731
/* Insert the current elaborations after ELAB, which is in some elaboration
   list.  */

void
insert_elaboration_list (tree elab)
{
  tree next = TREE_CHAIN (elab);

  /* If anything is pending (beyond the dummy head node), splice the
     pending entries into the target list between ELAB and its old
     successor, then reset our own list to empty.  */
  if (TREE_CHAIN (pending_elaborations))
    {
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
}
1747
1748 /* Returns a LABEL_DECL node for LABEL_NAME. */
1749
1750 tree
1751 create_label_decl (tree label_name)
1752 {
1753 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1754
1755 DECL_CONTEXT (label_decl) = current_function_decl;
1756 DECL_MODE (label_decl) = VOIDmode;
1757 DECL_SOURCE_LOCATION (label_decl) = input_location;
1758
1759 return label_decl;
1760 }
1761 \f
1762 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1763 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1764 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1765 PARM_DECL nodes chained through the TREE_CHAIN field).
1766
1767 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1768 appropriate fields in the FUNCTION_DECL. */
1769
tree
create_subprog_decl (tree subprog_name,
		     tree asm_name,
		     tree subprog_type,
		     tree param_decl_list,
		     int inline_flag,
		     int public_flag,
		     int extern_flag,
		     struct attrib *attr_list)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = 1;

  /* Transfer the caller's flags and the type's qualifiers onto the decl.  */
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  TREE_STATIC (subprog_decl) = 1;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  /* The anonymous RESULT_DECL holds the function's return value.  */
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);

  if (inline_flag)
    DECL_DECLARED_INLINE_P (subprog_decl) = 1;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  subprog_decl = pushdecl (subprog_decl);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
}
1815 \f
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  */

static int function_nesting_depth;

/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  /* Save the enclosing function's context only when entering a nested
     function; the outermost function has no context to save.  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced with the function's BLOCK
     in end_subprog_body.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  current_function_decl = subprog_decl;
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = TREE_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  /* Prepare the backend state for this function before any of its
     declarations are processed.  */
  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);
}
1856
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  BODY is the tree corresponding to
   the subprogram.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Deal with inline.  If declared inline or we should default to inline,
     set the flag in the decl.  */
  DECL_INLINE (fndecl)
    = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;

  /* Initialize the RTL code for the function.  */
  allocate_struct_function (fndecl);

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  DECL_SAVED_TREE (fndecl) = body;

  /* Restore the enclosing function (or NULL at top level).  */
  current_function_decl = DECL_CONTEXT (fndecl);

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* We do different things for nested and non-nested functions.
     ??? This should be in cgraph.  */
  if (!DECL_CONTEXT (fndecl))
    {
      /* Top-level function: gimplify it (and its nested functions),
	 lower the nesting, and hand everything to cgraph.  */
      gnat_gimplify_function (fndecl);
      lower_nested_functions (fndecl);
      gnat_finalize (fndecl);
    }
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
1909
1910 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
1911
1912 static void
1913 gnat_gimplify_function (tree fndecl)
1914 {
1915 struct cgraph_node *cgn;
1916
1917 dump_function (TDI_original, fndecl);
1918 gimplify_function_tree (fndecl);
1919 dump_function (TDI_generic, fndecl);
1920
1921 /* Convert all nested functions to GIMPLE now. We do things in this order
1922 so that items like VLA sizes are expanded properly in the context of the
1923 correct function. */
1924 cgn = cgraph_node (fndecl);
1925 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
1926 gnat_gimplify_function (cgn->decl);
1927 }
1928
1929 /* Give FNDECL and all its nested functions to cgraph for compilation. */
1930
1931 static void
1932 gnat_finalize (tree fndecl)
1933 {
1934 struct cgraph_node *cgn;
1935
1936 /* Finalize all nested functions now. */
1937 cgn = cgraph_node (fndecl);
1938 for (cgn = cgn->nested; cgn ; cgn = cgn->next_nested)
1939 gnat_finalize (cgn->decl);
1940
1941 cgraph_finalize_function (fndecl, false);
1942 }
1943 \f
/* Return a definition for a builtin function named NAME and whose data type
   is TYPE.  TYPE should be a function type with argument types.
   FUNCTION_CODE tells later passes how to compile calls to this function.
   See tree.h for its possible values.

   If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
   the name to be called if we can't opencode the function.  If
   ATTRS is nonzero, use that for the function attribute list.  */

tree
builtin_function (const char *name,
                  tree type,
                  int function_code,
                  enum built_in_class class,
                  const char *library_name,
                  tree attrs)
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  /* Builtins are by definition external and globally visible.  */
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  if (library_name)
    SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));

  /* Push the decl on the current binding level, then fill in the
     builtin-specific fields.  */
  pushdecl (decl);
  DECL_BUILT_IN_CLASS (decl) = class;
  DECL_FUNCTION_CODE (decl) = function_code;
  if (attrs)
    decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
  return decl;
}
1975
1976 /* Return an integer type with the number of bits of precision given by
1977 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1978 it is a signed type. */
1979
1980 tree
1981 gnat_type_for_size (unsigned precision, int unsignedp)
1982 {
1983 tree t;
1984 char type_name[20];
1985
1986 if (precision <= 2 * MAX_BITS_PER_WORD
1987 && signed_and_unsigned_types[precision][unsignedp] != 0)
1988 return signed_and_unsigned_types[precision][unsignedp];
1989
1990 if (unsignedp)
1991 t = make_unsigned_type (precision);
1992 else
1993 t = make_signed_type (precision);
1994
1995 if (precision <= 2 * MAX_BITS_PER_WORD)
1996 signed_and_unsigned_types[precision][unsignedp] = t;
1997
1998 if (TYPE_NAME (t) == 0)
1999 {
2000 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2001 TYPE_NAME (t) = get_identifier (type_name);
2002 }
2003
2004 return t;
2005 }
2006
2007 /* Likewise for floating-point types. */
2008
2009 static tree
2010 float_type_for_precision (int precision, enum machine_mode mode)
2011 {
2012 tree t;
2013 char type_name[20];
2014
2015 if (float_types[(int) mode] != 0)
2016 return float_types[(int) mode];
2017
2018 float_types[(int) mode] = t = make_node (REAL_TYPE);
2019 TYPE_PRECISION (t) = precision;
2020 layout_type (t);
2021
2022 if (TYPE_MODE (t) != mode)
2023 gigi_abort (414);
2024
2025 if (TYPE_NAME (t) == 0)
2026 {
2027 sprintf (type_name, "FLOAT_%d", precision);
2028 TYPE_NAME (t) = get_identifier (type_name);
2029 }
2030
2031 return t;
2032 }
2033
2034 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2035 an unsigned type; otherwise a signed type is returned. */
2036
2037 tree
2038 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2039 {
2040 if (mode == BLKmode)
2041 return NULL_TREE;
2042 else if (mode == VOIDmode)
2043 return void_type_node;
2044 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2045 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2046 else
2047 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2048 }
2049
2050 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2051
2052 tree
2053 gnat_unsigned_type (tree type_node)
2054 {
2055 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2056
2057 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2058 {
2059 type = copy_node (type);
2060 TREE_TYPE (type) = type_node;
2061 }
2062 else if (TREE_TYPE (type_node) != 0
2063 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2064 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2065 {
2066 type = copy_node (type);
2067 TREE_TYPE (type) = TREE_TYPE (type_node);
2068 }
2069
2070 return type;
2071 }
2072
2073 /* Return the signed version of a TYPE_NODE, a scalar type. */
2074
2075 tree
2076 gnat_signed_type (tree type_node)
2077 {
2078 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2079
2080 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2081 {
2082 type = copy_node (type);
2083 TREE_TYPE (type) = type_node;
2084 }
2085 else if (TREE_TYPE (type_node) != 0
2086 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2087 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2088 {
2089 type = copy_node (type);
2090 TREE_TYPE (type) = TREE_TYPE (type_node);
2091 }
2092
2093 return type;
2094 }
2095
2096 /* Return a type the same as TYPE except unsigned or signed according to
2097 UNSIGNEDP. */
2098
2099 tree
2100 gnat_signed_or_unsigned_type (int unsignedp, tree type)
2101 {
2102 if (! INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
2103 return type;
2104 else
2105 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2106 }
2107 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.

   The switch below dispatches on the old-style one-character tree code
   classes: 'd' declarations, 'c' constants, 'x' exceptional codes,
   'r' references, '<' comparisons, '1'/'2'/'e' unary/binary/other
   expressions.  */

tree
max_size (tree exp, int max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants contain no discriminant references.  */
      return exp;

    case 'x':
      /* The only exceptional code handled is TREE_LIST: rewrite the value
	 and recurse down the chain.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (! CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* A discriminant reference: substitute the extreme value of the
	 discriminant's type (the field is operand 1 of the reference).  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison yields 0 or 1, so the extremes are known outright.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation flips which extreme of the operand we want.  */
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    /* Only the second operand of a COMPOUND_EXPR is its value.  */
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  /* Generic binary case: subtraction flips the extreme wanted
	     for the right-hand side.  */
	  {
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Without knowing the condition, take the extreme over both
	       arms of the conditional.  */
	    return fold (build (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    /* Rewrite the argument list of the call.  */
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p), NULL);
	}
    }

  /* Anything else is an unexpected size expression.  */
  gigi_abort (408);
}
2204 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  A template holds one (min, max)
   field pair per array dimension; TEMPLATE_TYPE's field list therefore has
   two FIELD_DECLs per dimension.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip a padding or left-justified-modular wrapper to reach the
     actual array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     The loop steps two template fields (min/max) per iteration while
     advancing, in lockstep, either the bound list or the array-type
     chain, whichever is driving the bounds.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      /* FIELD is the min field; TREE_CHAIN (field) is the max field.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* Build the list in reverse; it is nreversed before use below.  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2267 \f
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.

   The numeric DTYPE and CLASS codes written below are the values defined
   by the VMS calling standard for argument descriptors.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Collect the index types, reversing the dimension order for Fortran
     convention arrays (except for NCA descriptors).  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	/* VAX floating point carried in an integer type: choose the
	   F/D/G-float code by decimal digits.  */
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* The POINTER field is initialized with the address of the described
     object; the PLACEHOLDER_EXPR stands for that object.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("POINTER",
		build_pointer_type_for_mode (type, SImode, false), record_type,
		build1 (ADDR_EXPR,
			build_pointer_type_for_mode (type, SImode, false),
			build (PLACEHOLDER_EXPR, type))));

  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0",
		    build_pointer_type_for_mode (inner_type, SImode, false),
		    record_type,
		    build1 (ADDR_EXPR,
			    build_pointer_type_for_mode (inner_type, SImode,
							 false),
			    tem)));

      /* Next come the addressing coefficients.  Each is the running
	 product of the lengths of the index ranges processed so far.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  /* Fields are named S0..Sn for NCA descriptors, M0..Mn
	     otherwise.  */
	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds, one L (lower) and one U (upper)
	 field per dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2561
2562 /* Utility routine for above code to make a field. */
2563
2564 static tree
2565 make_descriptor_field (const char *name, tree type,
2566 tree rec_type, tree initial)
2567 {
2568 tree field
2569 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2570
2571 DECL_INITIAL (field) = initial;
2572 return field;
2573 }
2574 \f
2575 /* Build a type to be used to represent an aliased object whose nominal
2576 type is an unconstrained array. This consists of a RECORD_TYPE containing
2577 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2578 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2579 is used to represent an arbitrary unconstrained object. Use NAME
2580 as the name of the record. */
2581
2582 tree
2583 build_unc_object_type (tree template_type, tree object_type, tree name)
2584 {
2585 tree type = make_node (RECORD_TYPE);
2586 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2587 template_type, type, 0, 0, 0, 1);
2588 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2589 type, 0, 0, 0, 1);
2590
2591 TYPE_NAME (type) = name;
2592 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2593 finish_record_type (type,
2594 chainon (chainon (NULL_TREE, template_field),
2595 array_field),
2596 0, 0);
2597
2598 return type;
2599 }
2600 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (ptr == 0 && ref == 0)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it.  After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed.  We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type).  We have to merge the
     qualifiers in thoses cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case: walk the pointer and
     reference chains, retargeting each at NEW_TYPE.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	{
	  TREE_TYPE (ptr) = new_type;

	  /* Re-emit debug info for named pointer types.  */
	  if (TYPE_NAME (ptr) != 0
	      && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	      && TREE_CODE (new_type) != ENUMERAL_TYPE)
	    rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				      global_bindings_p (), 0);
	}

      for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
	{
	  TREE_TYPE (ref) = new_type;

	  if (TYPE_NAME (ref) != 0
	      && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	      && TREE_CODE (new_type) != ENUMERAL_TYPE)
	    rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				      global_bindings_p (), 0);
	}
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context.  */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Graft the new type's fat-pointer fields onto the old fat
	 pointer record.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Every variant of the fat pointer designates the new
	 unconstrained array type.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record's size is the sum of its two fields' sizes.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2744 \f
/* Convert a pointer to a constrained array into a fat pointer of type TYPE.
   This involves making or finding a template.  EXPR is the pointer being
   converted; the result is a CONSTRUCTOR for the fat pointer record whose
   first field is the array pointer and whose second is the address of the
   template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is evaluated more than once below, so protect it.  */
      expr = save_expr (expr);
      /* Get at the record EXPR points to, stripping a direct ADDR_EXPR
	 rather than dereferencing when possible.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* First field of the record is the template, second is the data.  */
      template = build_component_ref (expr, NULL_TREE, fields, 0);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), 0));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2810 \f
2811 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2812 is something that is a fat pointer, so convert to it first if it EXPR
2813 is not already a fat pointer. */
2814
2815 static tree
2816 convert_to_thin_pointer (tree type, tree expr)
2817 {
2818 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2819 expr
2820 = convert_to_fat_pointer
2821 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2822
2823 /* We get the pointer to the data and use a NOP_EXPR to make it the
2824 proper GCC type. */
2825 expr
2826 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2827 expr = build1 (NOP_EXPR, type, expr);
2828
2829 return expr;
2830 }
2831 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;
  /* If we're converting between two aggregate types that have the same main
     variant, just make a NOP_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (NOP_EXPR, type, expr);

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (! TREE_CONSTANT (TYPE_SIZE (type))
	       || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    /* Deliberately empty: skip the padded-type handling below and let the
       generic code at the end (RECORD_TYPE case of the CODE switch) do an
       unchecked conversion.  */
    ;
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						   TYPE_FIELDS (etype), 0));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, 0);

      /* Otherwise wrap EXPR, converted to the field type, in a constructor
	 for the padded record.  */
      else
	return
	  gnat_build_constructor (type,
				  tree_cons (TYPE_FIELDS (type),
					     convert (TREE_TYPE
						      (TYPE_FIELDS (type)),
						      expr),
					     NULL_TREE));
    }

  /* If the input is a biased type, adjust first by adding the bias
     (TYPE_MIN_VALUE) back in, then convert the unbiased value.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), 0));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template. */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* NULL_EXPR does not represent an actual value, so no conversion is
	 needed; just set its type here.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  Likewise for
	 CONSTRUCTOR if the alias sets are the same.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
	  && (TREE_CODE (expr) == STRING_CST
	      || get_alias_set (etype) == get_alias_set (type)))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoids unneeded conversions which make reference computations
	 more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
	  && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
	  && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0)
	  && get_alias_set (type) == get_alias_set (etype))
	return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
		      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template, then continue with the generic code
	 below using the updated ETYPE/ECODE.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, 0));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Strip an aggregate view conversion and convert the operand
	 directly, except for fat pointers which need real conversion.  */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* Same main variant, or identical integer types: a simple NOP_EXPR
     suffices.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
	  && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  /* Generic dispatch on the target type.  */
  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case BOOLEAN_TYPE:
      return fold (build1 (NOP_EXPR, type, gnat_truthvalue_conversion (expr)));

    case INTEGER_TYPE:
      /* An integer type with actual array bounds stands for a packed
	 array; treat a conversion from an array as bit-wise.  */
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, 0);
      /* For a biased target, subtract the bias (TYPE_MIN_VALUE) after
	 converting to the unbiased base type.  */
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, 0);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* Wrap a scalar in a left-justified modular record by converting it
	 to the type of the record's single field.  */
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, 0);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  /* Also accept the unpadded/unwrapped type of a field that is a
	     padded or left-justified modular record.  */
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left-justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3173 \f
3174 /* Remove all conversions that are done in EXP. This includes converting
3175 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3176 is nonzero, always return the address of the containing object even if
3177 the address is not bit-aligned. */
3178
3179 tree
3180 remove_conversions (tree exp, int true_address)
3181 {
3182 switch (TREE_CODE (exp))
3183 {
3184 case CONSTRUCTOR:
3185 if (true_address
3186 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3187 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3188 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3189 break;
3190
3191 case COMPONENT_REF:
3192 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3193 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3194 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3195 break;
3196
3197 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3198 case NOP_EXPR: case CONVERT_EXPR:
3199 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3200
3201 default:
3202 break;
3203 }
3204
3205 return exp;
3206 }
3207 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the pointer to the array stored in the fat
	     pointer's P_ARRAY field, propagating readonly-ness.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, 0));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Re-type the NULL_EXPR to the inner array type, found through
	   the first field of the fat pointer type.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): there is no break here, so an expression of
	 unconstrained array type that is neither an UNCONSTRAINED_ARRAY_REF
	 nor a NULL_EXPR falls through into the RECORD_TYPE handling below —
	 confirm this fall-through is intentional.  */

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    /* The array is the second field, after the template.  */
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}
3263 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is set, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, int notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* Strip a biased representation from the input by converting
	 through an unbiased copy of its type.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Likewise convert to an unbiased copy of a biased output type;
	 the result is re-typed to TYPE below.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size; wrap the input in a record, then unchecked convert the record.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      /* Default: strip any template wrapper, then reinterpret the bits
	 with a VIEW_CONVERT_EXPR.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (! notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Shift left then right by (mode size - RM size) bits so the
	 right shift sign- or zero-extends, per BASE_TYPE's signedness.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			    OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3422
3423 #include "gt-ada-utils.h"
3424 #include "gtype-ada.h"