Fix for c330001 - PR ada/19386
gcc/ada/utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2005, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40 #include "function.h"
41 #include "cgraph.h"
42 #include "tree-inline.h"
43 #include "tree-gimple.h"
44 #include "tree-dump.h"
45
46 #include "ada.h"
47 #include "types.h"
48 #include "atree.h"
49 #include "elists.h"
50 #include "namet.h"
51 #include "nlists.h"
52 #include "stringt.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
60 #ifndef MAX_FIXED_MODE_SIZE
61 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
62 #endif
63
64 #ifndef MAX_BITS_PER_WORD
65 #define MAX_BITS_PER_WORD BITS_PER_WORD
66 #endif
67
68 /* If nonzero, pretend we are allocating at global level. */
69 int force_global;
70
71 /* Tree nodes for the various types and decls we create. */
72 tree gnat_std_decls[(int) ADT_LAST];
73
74 /* Functions to call for each of the possible raise reasons. */
75 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
76
77 /* Associates a GNAT tree node with a GCC tree node. It is used in
78 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
79 of `save_gnu_tree' for more info. */
80 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
81
82 /* This variable keeps a table of types for each precision so that we only
83 allocate each of them once. Signed and unsigned types are kept separate.
84
85 Note that these types are only used when fold-const requests something
86 special. Perhaps we should NOT share these types; we'll see how it
87 goes later. */
88 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
89
90 /* Likewise for float types, but record these by mode. */
91 static GTY(()) tree float_types[NUM_MACHINE_MODES];
92
93 /* For each binding contour we allocate a binding_level structure to indicate
94 the binding depth. */
95
96 struct gnat_binding_level GTY((chain_next ("%h.chain")))
97 {
98 /* The binding level containing this one (the enclosing binding level). */
99 struct gnat_binding_level *chain;
100 /* The BLOCK node for this level. */
101 tree block;
102 /* If nonzero, the setjmp buffer that needs to be updated for any
103 variable-sized definition within this context. */
104 tree jmpbuf_decl;
105 };
106
107 /* The binding level currently in effect. */
108 static GTY(()) struct gnat_binding_level *current_binding_level;
109
110 /* A chain of gnat_binding_level structures awaiting reuse. */
111 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
112
113 /* A chain of unused BLOCK nodes. */
114 static GTY((deletable)) tree free_block_chain;
115
116 struct language_function GTY(())
117 {
118 int unused;
119 };
120
121 static void gnat_install_builtins (void);
122 static tree merge_sizes (tree, tree, tree, bool, bool);
123 static tree compute_related_constant (tree, tree);
124 static tree split_plus (tree, tree *);
125 static bool value_zerop (tree);
126 static void gnat_gimplify_function (tree);
127 static tree float_type_for_precision (int, enum machine_mode);
128 static tree convert_to_fat_pointer (tree, tree);
129 static tree convert_to_thin_pointer (tree, tree);
130 static tree make_descriptor_field (const char *, tree, tree, tree);
131 static bool value_factor_p (tree, HOST_WIDE_INT);
132 static bool potential_alignment_gap (tree, tree, tree);
133 \f
134 /* Initialize the association of GNAT nodes to GCC trees. */
135
136 void
137 init_gnat_to_gnu (void)
138 {
139 associate_gnat_to_gnu
140 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
141 }
142
143 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
144 which is to be associated with GNAT_ENTITY. Such a GCC tree node is always
145 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
146
147 If GNU_DECL is zero, a previous association is to be reset. */
148
149 void
150 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
151 {
152 /* Check that GNAT_ENTITY is not already defined and that it is being set
153 to something which is a decl. Raise gigi 401 if not. Usually, this
154 means GNAT_ENTITY is defined twice, but occasionally is due to some
155 Gigi problem. */
156 gcc_assert (!gnu_decl
157 || (!associate_gnat_to_gnu[gnat_entity - First_Node_Id]
158 && (no_check || DECL_P (gnu_decl))));
159 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
160 }
161
162 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
163 Return the ..._DECL node that was associated with it. If there is no tree
164 node associated with GNAT_ENTITY, abort.
165
166 In some cases, such as delayed elaboration or expressions that need to
167 be elaborated only once, GNAT_ENTITY is really not an entity. */
168
169 tree
170 get_gnu_tree (Entity_Id gnat_entity)
171 {
172 gcc_assert (associate_gnat_to_gnu[gnat_entity - First_Node_Id]);
173 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
174 }
175
176 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
177
178 bool
179 present_gnu_tree (Entity_Id gnat_entity)
180 {
181 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id]) != 0;
182 }
183
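/* Illustrative sketch (not part of the original source): callers such as
   gnat_to_gnu_entity in decl.c typically pair the three routines above
   roughly as follows, for some GNAT entity gnat_entity and a freshly built
   GCC decl gnu_decl:

     if (present_gnu_tree (gnat_entity))
       return get_gnu_tree (gnat_entity);
     ...build gnu_decl for gnat_entity...
     save_gnu_tree (gnat_entity, gnu_decl, false);

   The real call sites are in decl.c and trans.c; the snippet is only a
   usage sketch.  */
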
184 \f
185 /* Return non-zero if we are currently in the global binding level. */
186
187 int
188 global_bindings_p (void)
189 {
190 return ((force_global || !current_function_decl) ? -1 : 0);
191 }
192
193 /* Enter a new binding level. */
194
195 void
196 gnat_pushlevel ()
197 {
198 struct gnat_binding_level *newlevel = NULL;
199
200 /* Reuse a struct for this binding level, if there is one. */
201 if (free_binding_level)
202 {
203 newlevel = free_binding_level;
204 free_binding_level = free_binding_level->chain;
205 }
206 else
207 newlevel
208 = (struct gnat_binding_level *)
209 ggc_alloc (sizeof (struct gnat_binding_level));
210
211 /* Use a free BLOCK, if any; otherwise, allocate one. */
212 if (free_block_chain)
213 {
214 newlevel->block = free_block_chain;
215 free_block_chain = TREE_CHAIN (free_block_chain);
216 TREE_CHAIN (newlevel->block) = NULL_TREE;
217 }
218 else
219 newlevel->block = make_node (BLOCK);
220
221 /* Point the BLOCK we just made to its parent. */
222 if (current_binding_level)
223 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
224
225 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
226 TREE_USED (newlevel->block) = 1;
227
228 /* Add this level to the front of the chain (stack) of levels that are
229 active. */
230 newlevel->chain = current_binding_level;
231 newlevel->jmpbuf_decl = NULL_TREE;
232 current_binding_level = newlevel;
233 }
234
235 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
236 and point FNDECL to this BLOCK. */
237
238 void
239 set_current_block_context (tree fndecl)
240 {
241 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
242 DECL_INITIAL (fndecl) = current_binding_level->block;
243 }
244
245 /* Set the jmpbuf_decl for the current binding level to DECL. */
246
247 void
248 set_block_jmpbuf_decl (tree decl)
249 {
250 current_binding_level->jmpbuf_decl = decl;
251 }
252
253 /* Get the jmpbuf_decl, if any, for the current binding level. */
254
255 tree
256 get_block_jmpbuf_decl ()
257 {
258 return current_binding_level->jmpbuf_decl;
259 }
260
261 /* Exit a binding level. Set any BLOCK into the current code group. */
262
263 void
264 gnat_poplevel ()
265 {
266 struct gnat_binding_level *level = current_binding_level;
267 tree block = level->block;
268
269 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
270 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
271
272 /* If this is a function-level BLOCK, don't do anything. Otherwise, if there
273 are no variables, free the block and merge its subblocks into those of its
274 parent block. Otherwise, add it to the list of subblocks of its parent. */
275 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
276 ;
277 else if (BLOCK_VARS (block) == NULL_TREE)
278 {
279 BLOCK_SUBBLOCKS (level->chain->block)
280 = chainon (BLOCK_SUBBLOCKS (block),
281 BLOCK_SUBBLOCKS (level->chain->block));
282 TREE_CHAIN (block) = free_block_chain;
283 free_block_chain = block;
284 }
285 else
286 {
287 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
288 BLOCK_SUBBLOCKS (level->chain->block) = block;
289 TREE_USED (block) = 1;
290 set_block_for_group (block);
291 }
292
293 /* Free this binding structure. */
294 current_binding_level = level->chain;
295 level->chain = free_binding_level;
296 free_binding_level = level;
297 }
298
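/* Illustrative sketch (not part of the original source): Gigi brackets each
   lexical scope with the two routines above, roughly:

     gnat_pushlevel ();
     ...gnat_pushdecl each local ..._DECL built for the scope...
     gnat_poplevel ();

   The BLOCK created by gnat_pushlevel either ends up chained into the
   enclosing level's BLOCK_SUBBLOCKS or, if it contains no variables, is
   recycled on free_block_chain, as implemented in gnat_poplevel above.  */
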
299 /* Insert BLOCK at the end of the list of subblocks of the
300 current binding level. This is used when a BIND_EXPR is expanded,
301 to handle the BLOCK node inside the BIND_EXPR. */
302
303 void
304 insert_block (tree block)
305 {
306 TREE_USED (block) = 1;
307 TREE_CHAIN (block) = BLOCK_SUBBLOCKS (current_binding_level->block);
308 BLOCK_SUBBLOCKS (current_binding_level->block) = block;
309 }
310 \f
311 /* Records a ..._DECL node DECL as belonging to the current lexical scope
312 and uses GNAT_NODE for location information and propagating flags. */
313
314 void
315 gnat_pushdecl (tree decl, Node_Id gnat_node)
316 {
317 /* If at top level, there is no context. But PARM_DECLs always go in the
318 level of their function. */
319 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
320 DECL_CONTEXT (decl) = 0;
321 else
322 DECL_CONTEXT (decl) = current_function_decl;
323
324 TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));
325
326 /* Set the location of DECL and emit a declaration for it. */
327 if (Present (gnat_node))
328 Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
329 add_decl_expr (decl, gnat_node);
330
331 /* Put the declaration on the list. The list of declarations is in reverse
332 order. The list will be reversed later. We don't do this for global
333 variables. Also, don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
334 the list. They will cause trouble with the debugger and aren't needed
335 anyway. */
336 if (!global_bindings_p ()
337 && (TREE_CODE (decl) != TYPE_DECL
338 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE))
339 {
340 TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
341 BLOCK_VARS (current_binding_level->block) = decl;
342 }
343
344 /* For the declaration of a type, set its name if it either is not already
345 set, was set to an IDENTIFIER_NODE (indicating an internal name),
346 or if the previous type name was not derived from a source name.
347 We'd rather have the type named with a real name and all the pointer
348 types to the same object have the same POINTER_TYPE node. The
349 corresponding code in c-decl.c makes a copy of the type node here, but
350 that may cause us trouble with incomplete types, so let's not try it
351 (at least for now). */
352
353 if (TREE_CODE (decl) == TYPE_DECL
354 && DECL_NAME (decl)
355 && (!TYPE_NAME (TREE_TYPE (decl))
356 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
357 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
358 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
359 && !DECL_ARTIFICIAL (decl))))
360 TYPE_NAME (TREE_TYPE (decl)) = decl;
361
362 if (TREE_CODE (decl) != CONST_DECL)
363 rest_of_decl_compilation (decl, global_bindings_p (), 0);
364 }
365 \f
366 /* Do little here. Set up the standard declarations later after the
367 front end has been run. */
368
369 void
370 gnat_init_decl_processing (void)
371 {
372 input_line = 0;
373
374 /* Make the binding_level structure for global names. */
375 current_function_decl = 0;
376 current_binding_level = 0;
377 free_binding_level = 0;
378 gnat_pushlevel ();
379
380 build_common_tree_nodes (true, true);
381
382 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
383 corresponding to the size of Pmode. In most cases when ptr_mode and
384 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
385 far better code using the width of Pmode. Do this here since we need
386 it before we can expand the GNAT types. */
387 size_type_node = gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0);
388 set_sizetype (size_type_node);
389 build_common_tree_nodes_2 (0);
390
391 /* Give names and make TYPE_DECLs for common types. */
392 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype),
393 Empty);
394 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
395 integer_type_node),
396 Empty);
397 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
398 char_type_node),
399 Empty);
400 gnat_pushdecl (build_decl (TYPE_DECL, get_identifier ("long integer"),
401 long_integer_type_node),
402 Empty);
403
404 ptr_void_type_node = build_pointer_type (void_type_node);
405
406 gnat_install_builtins ();
407 }
408
409 /* Install the builtin functions the middle-end needs. */
410
411 static void
412 gnat_install_builtins ()
413 {
414 /* Builtins used by generic optimizers. */
415 build_common_builtin_nodes ();
416
417 /* Target specific builtins, such as the AltiVec family on ppc. */
418 targetm.init_builtins ();
419 }
420
421 /* Create the predefined scalar types such as `integer_type_node' needed
422 in the gcc back-end and initialize the global binding level. */
423
424 void
425 init_gigi_decls (tree long_long_float_type, tree exception_type)
426 {
427 tree endlink, decl;
428 unsigned int i;
429
430 /* Set the types that GCC and Gigi use from the front end. We would like
431 to do this for char_type_node, but it needs to correspond to the C
432 char type. */
433 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
434 {
435 /* In this case, the builtin floating point types are VAX float,
436 so make up a type for use. */
437 longest_float_type_node = make_node (REAL_TYPE);
438 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
439 layout_type (longest_float_type_node);
440 create_type_decl (get_identifier ("longest float type"),
441 longest_float_type_node, NULL, false, true, Empty);
442 }
443 else
444 longest_float_type_node = TREE_TYPE (long_long_float_type);
445
446 except_type_node = TREE_TYPE (exception_type);
447
448 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
449 create_type_decl (get_identifier ("unsigned int"), unsigned_type_node,
450 NULL, false, true, Empty);
451
452 void_type_decl_node = create_type_decl (get_identifier ("void"),
453 void_type_node, NULL, false, true,
454 Empty);
455
456 void_ftype = build_function_type (void_type_node, NULL_TREE);
457 ptr_void_ftype = build_pointer_type (void_ftype);
458
459 /* Now declare runtime functions. */
460 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
461
462 /* malloc is a function declaration tree for a function to allocate
463 memory. */
464 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
465 NULL_TREE,
466 build_function_type (ptr_void_type_node,
467 tree_cons (NULL_TREE,
468 sizetype,
469 endlink)),
470 NULL_TREE, false, true, true, NULL,
471 Empty);
472
473 /* free is a function declaration tree for a function to free memory. */
474 free_decl
475 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
476 build_function_type (void_type_node,
477 tree_cons (NULL_TREE,
478 ptr_void_type_node,
479 endlink)),
480 NULL_TREE, false, true, true, NULL, Empty);
481
482 /* Make the types and functions used for exception processing. */
483 jmpbuf_type
484 = build_array_type (gnat_type_for_mode (Pmode, 0),
485 build_index_type (build_int_cst (NULL_TREE, 5)));
486 create_type_decl (get_identifier ("JMPBUF_T"), jmpbuf_type, NULL,
487 false, true, Empty);
488 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
489
490 /* Functions to get and set the jumpbuf pointer for the current thread. */
491 get_jmpbuf_decl
492 = create_subprog_decl
493 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
494 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
495 NULL_TREE, false, true, true, NULL, Empty);
496
497 set_jmpbuf_decl
498 = create_subprog_decl
499 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
500 NULL_TREE,
501 build_function_type (void_type_node,
502 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
503 NULL_TREE, false, true, true, NULL, Empty);
504
505 /* Function to get the current exception. */
506 get_excptr_decl
507 = create_subprog_decl
508 (get_identifier ("system__soft_links__get_gnat_exception"),
509 NULL_TREE,
510 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
511 NULL_TREE, false, true, true, NULL, Empty);
512
513 /* Functions that raise exceptions. */
514 raise_nodefer_decl
515 = create_subprog_decl
516 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
517 build_function_type (void_type_node,
518 tree_cons (NULL_TREE,
519 build_pointer_type (except_type_node),
520 endlink)),
521 NULL_TREE, false, true, true, NULL, Empty);
522
523 /* Dummy objects to materialize "others" and "all others" in the exception
524 tables. These are exported by a-exexpr.adb, so see this unit for the
525 types to use. */
526
527 others_decl
528 = create_var_decl (get_identifier ("OTHERS"),
529 get_identifier ("__gnat_others_value"),
530 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
531
532 all_others_decl
533 = create_var_decl (get_identifier ("ALL_OTHERS"),
534 get_identifier ("__gnat_all_others_value"),
535 integer_type_node, 0, 1, 0, 1, 1, 0, Empty);
536
537 /* Hooks to call when entering/leaving an exception handler. */
538 begin_handler_decl
539 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
540 build_function_type (void_type_node,
541 tree_cons (NULL_TREE,
542 ptr_void_type_node,
543 endlink)),
544 NULL_TREE, false, true, true, NULL, Empty);
545
546 end_handler_decl
547 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
548 build_function_type (void_type_node,
549 tree_cons (NULL_TREE,
550 ptr_void_type_node,
551 endlink)),
552 NULL_TREE, false, true, true, NULL, Empty);
553
554 /* If in no exception handlers mode, all raise statements are redirected to
555 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
556 this procedure will never be called in this mode. */
557 if (No_Exception_Handlers_Set ())
558 {
559 decl
560 = create_subprog_decl
561 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
562 build_function_type (void_type_node,
563 tree_cons (NULL_TREE,
564 build_pointer_type (char_type_node),
565 tree_cons (NULL_TREE,
566 integer_type_node,
567 endlink))),
568 NULL_TREE, false, true, true, NULL, Empty);
569
570 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
571 gnat_raise_decls[i] = decl;
572 }
573 else
574 /* Otherwise, make one decl for each exception reason. */
575 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
576 {
577 char name[17];
578
579 sprintf (name, "__gnat_rcheck_%.2d", i);
580 gnat_raise_decls[i]
581 = create_subprog_decl
582 (get_identifier (name), NULL_TREE,
583 build_function_type (void_type_node,
584 tree_cons (NULL_TREE,
585 build_pointer_type
586 (char_type_node),
587 tree_cons (NULL_TREE,
588 integer_type_node,
589 endlink))),
590 NULL_TREE, false, true, true, NULL, Empty);
591 }
592
593 /* Indicate that these never return. */
594 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
595 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
596 TREE_TYPE (raise_nodefer_decl)
597 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
598 TYPE_QUAL_VOLATILE);
599
600 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
601 {
602 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
603 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
604 TREE_TYPE (gnat_raise_decls[i])
605 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
606 TYPE_QUAL_VOLATILE);
607 }
608
609 /* setjmp returns an integer and has one operand, which is a pointer to
610 a jmpbuf. */
611 setjmp_decl
612 = create_subprog_decl
613 (get_identifier ("__builtin_setjmp"), NULL_TREE,
614 build_function_type (integer_type_node,
615 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
616 NULL_TREE, false, true, true, NULL, Empty);
617
618 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
619 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
620
621 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
622 address. */
623 update_setjmp_buf_decl
624 = create_subprog_decl
625 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE,
626 build_function_type (void_type_node,
627 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
628 NULL_TREE, false, true, true, NULL, Empty);
629
630 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl) = BUILT_IN_NORMAL;
631 DECL_FUNCTION_CODE (update_setjmp_buf_decl) = BUILT_IN_UPDATE_SETJMP_BUF;
632
633 main_identifier_node = get_identifier ("main");
634 }
635 \f
636 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL nodes
637 (FIELDLIST), finish constructing the record or union type. If HAS_REP is
638 true, this record has a rep clause; don't call layout_type but merely set
639 the size and alignment ourselves. If DEFER_DEBUG is true, do not call
640 the debugging routines on this type; it will be done later. */
641
642 void
643 finish_record_type (tree record_type, tree fieldlist, bool has_rep,
644 bool defer_debug)
645 {
646 enum tree_code code = TREE_CODE (record_type);
647 tree ada_size = bitsize_zero_node;
648 tree size = bitsize_zero_node;
649 bool var_size = false;
650 bool had_size = TYPE_SIZE (record_type) != 0;
651 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
652 tree field;
653
654 TYPE_FIELDS (record_type) = fieldlist;
655 TYPE_STUB_DECL (record_type)
656 = build_decl (TYPE_DECL, NULL_TREE, record_type);
657
658 /* We don't need both the typedef name and the record name output in
659 the debugging information, since they are the same. */
660 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
661
662 /* Globally initialize the record first. If this is a rep'ed record,
663 that just means some initializations; otherwise, layout the record. */
664
665 if (has_rep)
666 {
667 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
668 TYPE_MODE (record_type) = BLKmode;
669
670 if (!had_size_unit)
671 TYPE_SIZE_UNIT (record_type) = size_zero_node;
672
673 if (!had_size)
674 TYPE_SIZE (record_type) = bitsize_zero_node;
675 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
676 out just like a UNION_TYPE, since the size will be fixed. */
677 else if (code == QUAL_UNION_TYPE)
678 code = UNION_TYPE;
679 }
680 else
681 {
682 /* Ensure there isn't a size already set. There can be in an error
683 case where there is a rep clause but all fields have errors and
684 no longer have a position. */
685 TYPE_SIZE (record_type) = 0;
686 layout_type (record_type);
687 }
688
689 /* At this point, the position and size of each field is known. It was
690 either set before entry by a rep clause, or by laying out the type above.
691
692 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
693 to compute the Ada size; the GCC size and alignment (for rep'ed records
694 that are not padding types); and the mode (for rep'ed records). We also
695 clear the DECL_BIT_FIELD indication for the cases we know have not been
696 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
697
698 if (code == QUAL_UNION_TYPE)
699 fieldlist = nreverse (fieldlist);
700
701 for (field = fieldlist; field; field = TREE_CHAIN (field))
702 {
703 tree pos = bit_position (field);
704
705 tree type = TREE_TYPE (field);
706 tree this_size = DECL_SIZE (field);
707 tree this_ada_size = DECL_SIZE (field);
708
709 /* We need to make an XVE/XVU record if any field has variable size,
710 whether or not the record does. For example, if we have a union,
711 it may be that all fields, rounded up to the alignment, have the
712 same size, in which case we'll use that size. But the debug
713 output routines (except Dwarf2) won't be able to output the fields,
714 so we need to make the special record. */
715 if (TREE_CODE (this_size) != INTEGER_CST)
716 var_size = true;
717
718 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
719 || TREE_CODE (type) == QUAL_UNION_TYPE)
720 && !TYPE_IS_FAT_POINTER_P (type)
721 && !TYPE_CONTAINS_TEMPLATE_P (type)
722 && TYPE_ADA_SIZE (type))
723 this_ada_size = TYPE_ADA_SIZE (type);
724
725 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
726 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
727 && value_factor_p (pos, BITS_PER_UNIT)
728 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
729 DECL_BIT_FIELD (field) = 0;
730
731 /* If we still have DECL_BIT_FIELD set at this point, we know the field
732 is technically not addressable. Except that it can actually be
733 addressed if the field is BLKmode and happens to be properly
734 aligned. */
735 DECL_NONADDRESSABLE_P (field)
736 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
737
738 if (has_rep && !DECL_BIT_FIELD (field))
739 TYPE_ALIGN (record_type)
740 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
741
742 switch (code)
743 {
744 case UNION_TYPE:
745 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
746 size = size_binop (MAX_EXPR, size, this_size);
747 break;
748
749 case QUAL_UNION_TYPE:
750 ada_size
751 = fold (build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
752 this_ada_size, ada_size));
753 size = fold (build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
754 this_size, size));
755 break;
756
757 case RECORD_TYPE:
758 /* Since we know here that all fields are sorted in order of
759 increasing bit position, the size of the record is one
760 higher than the ending bit of the last field processed
761 unless we have a rep clause, since in that case we might
762 have a field outside a QUAL_UNION_TYPE that has a higher ending
763 position. So use a MAX in that case. Also, if this field is a
764 QUAL_UNION_TYPE, we need to take into account the previous size in
765 the case of empty variants. */
766 ada_size
767 = merge_sizes (ada_size, pos, this_ada_size,
768 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
769 size = merge_sizes (size, pos, this_size,
770 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
771 break;
772
773 default:
774 gcc_unreachable ();
775 }
776 }
777
778 if (code == QUAL_UNION_TYPE)
779 nreverse (fieldlist);
780
781 /* If this is a padding record, we never want to make the size smaller than
782 what was specified in it, if any. */
783 if (TREE_CODE (record_type) == RECORD_TYPE
784 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
785 size = TYPE_SIZE (record_type);
786
787 /* Now set any of the values we've just computed that apply. */
788 if (!TYPE_IS_FAT_POINTER_P (record_type)
789 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
790 SET_TYPE_ADA_SIZE (record_type, ada_size);
791
792 if (has_rep)
793 {
794 tree size_unit
795 = (had_size_unit ? TYPE_SIZE_UNIT (record_type)
796 : convert (sizetype, size_binop (CEIL_DIV_EXPR, size,
797 bitsize_unit_node)));
798
799 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
800 TYPE_SIZE_UNIT (record_type)
801 = round_up (size_unit, TYPE_ALIGN (record_type) / BITS_PER_UNIT);
802
803 compute_record_mode (record_type);
804 }
805
806 if (!defer_debug)
807 {
808 /* If this record is of variable size, rename it so that the
809 debugger knows it is of variable size and make a new, parallel
810 record type that tells the debugger how the record is laid out. See
811 exp_dbug.ads. But don't do this for records that are padding
812 since they confuse GDB. */
813 if (var_size
814 && !(TREE_CODE (record_type) == RECORD_TYPE
815 && TYPE_IS_PADDING_P (record_type)))
816 {
817 tree new_record_type
818 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
819 ? UNION_TYPE : TREE_CODE (record_type));
820 tree orig_name = TYPE_NAME (record_type);
821 tree orig_id
822 = (TREE_CODE (orig_name) == TYPE_DECL ? DECL_NAME (orig_name)
823 : orig_name);
824 tree new_id
825 = concat_id_with_name (orig_id,
826 TREE_CODE (record_type) == QUAL_UNION_TYPE
827 ? "XVU" : "XVE");
828 tree last_pos = bitsize_zero_node;
829 tree old_field;
830 tree prev_old_field = 0;
831
832 TYPE_NAME (new_record_type) = new_id;
833 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
834 TYPE_STUB_DECL (new_record_type)
835 = build_decl (TYPE_DECL, NULL_TREE, new_record_type);
836 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
837 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
838 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
839 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
840 TYPE_SIZE_UNIT (new_record_type)
841 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
842
843 /* Now scan all the fields, replacing each field with a new
844 field corresponding to the new encoding. */
845 for (old_field = TYPE_FIELDS (record_type); old_field;
846 old_field = TREE_CHAIN (old_field))
847 {
848 tree field_type = TREE_TYPE (old_field);
849 tree field_name = DECL_NAME (old_field);
850 tree new_field;
851 tree curpos = bit_position (old_field);
852 bool var = false;
853 unsigned int align = 0;
854 tree pos;
855
856 /* See how the position was modified from the last position.
857
858 There are two basic cases we support: a value was added
859 to the last position or the last position was rounded to
860 a boundary and then something was added. Check for the
861 first case first. If not, see if there is any evidence
862 of rounding. If so, round the last position and try
863 again.
864
865 If this is a union, the position can be taken as zero. */
866
867 if (TREE_CODE (new_record_type) == UNION_TYPE)
868 pos = bitsize_zero_node, align = 0;
869 else
870 pos = compute_related_constant (curpos, last_pos);
871
872 if (!pos && TREE_CODE (curpos) == MULT_EXPR
873 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
874 {
875 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
876 pos = compute_related_constant (curpos,
877 round_up (last_pos, align));
878 }
879 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
880 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
881 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
882 && host_integerp (TREE_OPERAND
883 (TREE_OPERAND (curpos, 0), 1),
884 1))
885 {
886 align
887 = tree_low_cst
888 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
889 pos = compute_related_constant (curpos,
890 round_up (last_pos, align));
891 }
892 else if (potential_alignment_gap (prev_old_field, old_field,
893 pos))
894 {
895 align = TYPE_ALIGN (field_type);
896 pos = compute_related_constant (curpos,
897 round_up (last_pos, align));
898 }
899
900 /* If we can't compute a position, set it to zero.
901
902 ??? We really should abort here, but it's too much work
903 to get this correct for all cases. */
904
905 if (!pos)
906 pos = bitsize_zero_node;
907
908 /* See if this type is variable-size and make a new type
909 and indicate the indirection if so. */
910 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
911 {
912 field_type = build_pointer_type (field_type);
913 var = true;
914 }
915
916 /* Make a new field name, if necessary. */
917 if (var || align != 0)
918 {
919 char suffix[6];
920
921 if (align != 0)
922 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
923 align / BITS_PER_UNIT);
924 else
925 strcpy (suffix, "XVL");
926
927 field_name = concat_id_with_name (field_name, suffix);
928 }
929
930 new_field = create_field_decl (field_name, field_type,
931 new_record_type, 0,
932 DECL_SIZE (old_field), pos, 0);
933 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
934 TYPE_FIELDS (new_record_type) = new_field;
935
936 /* If old_field is of QUAL_UNION_TYPE, take its size as being
937 zero. The only time it's not the last field of the record
938 is when there are other components at fixed positions after
939 it (meaning there was a rep clause for every field) and we
940 want to be able to encode them. */
941 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
942 (TREE_CODE (TREE_TYPE (old_field))
943 == QUAL_UNION_TYPE)
944 ? bitsize_zero_node
945 : DECL_SIZE (old_field));
946 prev_old_field = old_field;
947 }
948
949 TYPE_FIELDS (new_record_type)
950 = nreverse (TYPE_FIELDS (new_record_type));
951
952 rest_of_type_compilation (new_record_type, global_bindings_p ());
953 }
954
955 rest_of_type_compilation (record_type, global_bindings_p ());
956 }
957 }
958
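/* Annotation (not part of the original source): for a variable-size record
   R, the code above builds a parallel debug type whose name is R with an
   XVE suffix (XVU when the type is a QUAL_UNION_TYPE, i.e. a variant part),
   whose fields mirror those of R but carry constant (or zero) positions.
   A field of variable size is replaced by a pointer and its name gets an
   "XVL" suffix, while a field whose position was rounded to an alignment
   gets an "XVA<bytes>" suffix ("XVL<bytes>" when both apply). The names
   are assembled by concat_id_with_name, so the exact separator follows
   exp_dbug.ads.  */
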
959 /* Utility function of above to merge LAST_SIZE, the previous size of a record,
960 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
961 if this represents a QUAL_UNION_TYPE, in which case we must look for
962 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
963 is nonzero, we must take the MAX of the end position of this field
964 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
965
966 We return an expression for the size. */
967
968 static tree
969 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
970 bool has_rep)
971 {
972 tree type = TREE_TYPE (last_size);
973 tree new;
974
975 if (!special || TREE_CODE (size) != COND_EXPR)
976 {
977 new = size_binop (PLUS_EXPR, first_bit, size);
978 if (has_rep)
979 new = size_binop (MAX_EXPR, last_size, new);
980 }
981
982 else
983 new = fold (build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
984 integer_zerop (TREE_OPERAND (size, 1))
985 ? last_size : merge_sizes (last_size, first_bit,
986 TREE_OPERAND (size, 1),
987 1, has_rep),
988 integer_zerop (TREE_OPERAND (size, 2))
989 ? last_size : merge_sizes (last_size, first_bit,
990 TREE_OPERAND (size, 2),
991 1, has_rep)));
992
993 /* We don't need any NON_LVALUE_EXPRs, and they can confuse us (especially
994 when fed through substitute_in_expr) into thinking that a constant
995 size is not constant. */
996 while (TREE_CODE (new) == NON_LVALUE_EXPR)
997 new = TREE_OPERAND (new, 0);
998
999 return new;
1000 }
1001
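/* Worked example (annotation, not part of the original source): for an
   ordinary RECORD_TYPE without rep clause, merge_sizes (last_size,
   first_bit, size, false, false) simply yields first_bit + size, i.e. the
   end position of the field just processed. With has_rep set it yields
   MAX (last_size, first_bit + size), so a field placed before an earlier
   one by a rep clause cannot shrink the accumulated size. The COND_EXPR
   branch handles QUAL_UNION_TYPE variants, substituting the previous size
   for empty variants.  */
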
1002 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1003 related by the addition of a constant. Return that constant if so. */
1004
1005 static tree
1006 compute_related_constant (tree op0, tree op1)
1007 {
1008 tree op0_var, op1_var;
1009 tree op0_con = split_plus (op0, &op0_var);
1010 tree op1_con = split_plus (op1, &op1_var);
1011 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1012
1013 if (operand_equal_p (op0_var, op1_var, 0))
1014 return result;
1015 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1016 return result;
1017 else
1018 return 0;
1019 }
1020
1021 /* Utility function of above to split a tree IN, which may be a sum, into a
1022 constant part, which is returned, and a variable part, which is stored
1023 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1024 bitsizetype. */
1025
1026 static tree
1027 split_plus (tree in, tree *pvar)
1028 {
1029 /* Strip NOPS in order to ease the tree traversal and maximize the
1030 potential for constant or plus/minus discovery. We need to be careful
1031 to always return and set *pvar to bitsizetype trees, but it's worth
1032 the effort. */
1033 STRIP_NOPS (in);
1034
1035 *pvar = convert (bitsizetype, in);
1036
1037 if (TREE_CODE (in) == INTEGER_CST)
1038 {
1039 *pvar = bitsize_zero_node;
1040 return convert (bitsizetype, in);
1041 }
1042 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1043 {
1044 tree lhs_var, rhs_var;
1045 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1046 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1047
1048 if (lhs_var == TREE_OPERAND (in, 0)
1049 && rhs_var == TREE_OPERAND (in, 1))
1050 return bitsize_zero_node;
1051
1052 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1053 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1054 }
1055 else
1056 return bitsize_zero_node;
1057 }
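
/* Worked example (annotation, not part of the original source): with the
   two helpers above, if curpos is the bitsizetype expression N + 48 and
   last_pos is N, split_plus decomposes each into constant and variable
   parts, and compute_related_constant (curpos, last_pos) returns the
   bitsizetype constant 48. If the variable parts cannot be proved equal,
   it returns 0 and the caller in finish_record_type falls back to the
   alignment-based heuristics.  */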
1058 \f
1059 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1060 subprogram. If it is void_type_node, then we are dealing with a procedure,
1061 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1062 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1063 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1064 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1065 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1066 RETURNS_WITH_DSP is nonzero if the function is to return with a
1067 depressed stack pointer. RETURNS_BY_TARGET_PTR is true if the function
1068 is to be passed (as its first parameter) the address of the place to copy
1069 its result. */
1070
1071 tree
1072 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1073 bool returns_unconstrained, bool returns_by_ref,
1074 bool returns_with_dsp, bool returns_by_target_ptr)
1075 {
1076 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1077 the subprogram formal parameters. This list is generated by traversing the
1078 input list of PARM_DECL nodes. */
1079 tree param_type_list = NULL;
1080 tree param_decl;
1081 tree type;
1082
1083 for (param_decl = param_decl_list; param_decl;
1084 param_decl = TREE_CHAIN (param_decl))
1085 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1086 param_type_list);
1087
1088 /* The list of the function parameter types has to be terminated by the void
1089 type to signal to the back-end that we are not dealing with a variable
1090 parameter subprogram, but that the subprogram has a fixed number of
1091 parameters. */
1092 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1093
1094 /* The list of argument types has been created in reverse order,
1095 so nreverse it. */
1096 param_type_list = nreverse (param_type_list);
1097
1098 type = build_function_type (return_type, param_type_list);
1099
1100 /* TYPE may have been shared since GCC hashes types. If it already has a
1101 CICO_LIST or the new type should have one, make a copy of TYPE. Likewise
1102 for RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1103 if (TYPE_CI_CO_LIST (type) || cico_list
1104 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1105 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1106 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1107 type = copy_type (type);
1108
1109 TYPE_CI_CO_LIST (type) = cico_list;
1110 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1111 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1112 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1113 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1114 return type;
1115 }
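
/* Illustrative sketch (not part of the original source): for an Ada
   procedure with a single Integer formal and no copy-in/copy-out
   parameters, Gigi would end up making a call along the lines of

     create_subprog_type (void_type_node, param_decls, NULL_TREE,
                          false, false, false, false);

   where param_decls is the PARM_DECL chain built with create_param_decl.
   The actual calls are made when translating subprogram entities in
   decl.c; the argument values shown are only indicative.  */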
1116 \f
1117 /* Return a copy of TYPE but safe to modify in any way. */
1118
1119 tree
1120 copy_type (tree type)
1121 {
1122 tree new = copy_node (type);
1123
1124 /* copy_node clears this field instead of copying it, because it is
1125 aliased with TREE_CHAIN. */
1126 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1127
1128 TYPE_POINTER_TO (new) = 0;
1129 TYPE_REFERENCE_TO (new) = 0;
1130 TYPE_MAIN_VARIANT (new) = new;
1131 TYPE_NEXT_VARIANT (new) = 0;
1132
1133 return new;
1134 }
1135 \f
1136 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1137 TYPE_INDEX_TYPE is INDEX. */
1138
1139 tree
1140 create_index_type (tree min, tree max, tree index)
1141 {
1142 /* First build a type for the desired range. */
1143 tree type = build_index_2_type (min, max);
1144
1145 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1146 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1147 is set, but not to INDEX, make a copy of this type with the requested
1148 index type. Note that we have no way of sharing these types, but that's
1149 only a small hole. */
1150 if (TYPE_INDEX_TYPE (type) == index)
1151 return type;
1152 else if (TYPE_INDEX_TYPE (type))
1153 type = copy_type (type);
1154
1155 SET_TYPE_INDEX_TYPE (type, index);
1156 create_type_decl (NULL_TREE, type, NULL, true, false, Empty);
1157 return type;
1158 }
1159 \f
1160 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
1161 string) and TYPE is a ..._TYPE node giving its data type.
1162 ARTIFICIAL_P is true if this is a declaration that was generated
1163 by the compiler. DEBUG_INFO_P is true if we need to write debugging
1164 information about this type. GNAT_NODE is used for the position of
1165 the decl. */
1166
1167 tree
1168 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
1169 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
1170 {
1171 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1172 enum tree_code code = TREE_CODE (type);
1173
1174 DECL_ARTIFICIAL (type_decl) = artificial_p;
1175
1176 process_attributes (type_decl, attr_list);
1177
1178 /* Pass type declaration information to the debugger unless this is an
1179 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1180 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1181 a dummy type, which will be completed later, or a type for which
1182 debugging information was not requested. */
1183 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1184 || !debug_info_p)
1185 DECL_IGNORED_P (type_decl) = 1;
1186 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1187 && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
1188 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1189 rest_of_decl_compilation (type_decl, global_bindings_p (), 0);
1190
1191 if (!TYPE_IS_DUMMY_P (type))
1192 gnat_pushdecl (type_decl, gnat_node);
1193
1194 return type_decl;
1195 }
1196
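/* Annotation (not part of the original source): init_gigi_decls above shows
   a typical use, e.g. create_type_decl (get_identifier ("JMPBUF_T"),
   jmpbuf_type, NULL, false, true, Empty), which both builds the TYPE_DECL
   and pushes it via gnat_pushdecl since jmpbuf_type is not a dummy type.  */
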
1197 /* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1198 ASM_NAME is its assembler name (if provided). TYPE is its data type
1199 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1200 expression; NULL_TREE if none.
1201
1202 CONST_FLAG is true if this variable is constant.
1203
1204 PUBLIC_FLAG is true if this definition is to be made visible outside of
1205 the current compilation unit. This flag should be set when processing the
1206 variable definitions in a package specification. EXTERN_FLAG is nonzero
1207 when processing an external variable declaration (as opposed to a
1208 definition: no storage is to be allocated for the variable here).
1209
1210 STATIC_FLAG is only relevant when not at top level. In that case
1211 it indicates whether to always allocate storage to the variable.
1212
1213 GNAT_NODE is used for the position of the decl. */
1214
1215 tree
1216 create_var_decl (tree var_name, tree asm_name, tree type, tree var_init,
1217 bool const_flag, bool public_flag, bool extern_flag,
1218 bool static_flag, struct attrib *attr_list, Node_Id gnat_node)
1219 {
1220 bool init_const
1221 = (!var_init
1222 ? false
1223 : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
1224 && (global_bindings_p () || static_flag
1225 ? 0 != initializer_constant_valid_p (var_init,
1226 TREE_TYPE (var_init))
1227 : TREE_CONSTANT (var_init))));
1228 tree var_decl
1229 = build_decl ((const_flag && init_const
1230 /* Only make a CONST_DECL for sufficiently-small objects.
1231 We consider complex double "sufficiently-small". */
1232 && TYPE_SIZE (type) != 0
1233 && host_integerp (TYPE_SIZE_UNIT (type), 1)
1234 && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
1235 GET_MODE_SIZE (DCmode)))
1236 ? CONST_DECL : VAR_DECL, var_name, type);
1237
1238 /* If this is external, throw away any initializations unless this is a
1239 CONST_DECL (meaning we have a constant); they will be done elsewhere.
1240 If we are defining a global here, leave a constant initialization and
1241 save any variable elaborations for the elaboration routine. If we are
1242 just annotating types, throw away the initialization if it isn't a
1243 constant. */
1244 if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
1245 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
1246 var_init = NULL_TREE;
1247
1248 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1249 try to fiddle with DECL_COMMON. However, on platforms that don't
1250 support global BSS sections, uninitialized global variables would
1251 go in DATA instead, thus increasing the size of the executable. */
1252 #if !defined(ASM_OUTPUT_BSS) && !defined(ASM_OUTPUT_ALIGNED_BSS)
1253 DECL_COMMON (var_decl) = !flag_no_common;
1254 #endif
1255 DECL_INITIAL (var_decl) = var_init;
1256 TREE_READONLY (var_decl) = const_flag;
1257 DECL_EXTERNAL (var_decl) = extern_flag;
1258 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
1259 TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
1260 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
1261 = TYPE_VOLATILE (type);
1262
1263 /* If it's public and not external, always allocate storage for it.
1264 At the global binding level we need to allocate static storage for the
1265 variable if and only if it's not external. If we are not at the top level
1266 we allocate automatic storage unless requested not to. */
1267 TREE_STATIC (var_decl)
1268 = public_flag || (global_bindings_p () ? !extern_flag : static_flag);
1269
1270 if (asm_name)
1271 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
1272
1273 process_attributes (var_decl, attr_list);
1274
1275 /* Add this decl to the current binding level. */
1276 gnat_pushdecl (var_decl, gnat_node);
1277
1278 if (TREE_SIDE_EFFECTS (var_decl))
1279 TREE_ADDRESSABLE (var_decl) = 1;
1280
1281 if (TREE_CODE (var_decl) != CONST_DECL)
1282 rest_of_decl_compilation (var_decl, global_bindings_p (), 0);
1283
1284 return var_decl;
1285 }
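
/* Annotation (not part of the original source): the others_decl declaration
   in init_gigi_decls above is a representative call: it requests a constant,
   external variable (the object itself is exported by a-exexpr.adb) with no
   initializer and no attributes, known to Gigi as OTHERS but referenced
   under the assembler name __gnat_others_value.  */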
1286 \f
1287 /* Returns a FIELD_DECL node. FIELD_NAME is the field name, FIELD_TYPE is its
1288 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1289 this field is in a record type with a "pragma pack". If SIZE is nonzero
1290 it is the specified size for this field. If POS is nonzero, it is the bit
1291 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1292 the address of this field for aliasing purposes. If it is negative, we
1293 should not make a bitfield, which is used by make_aligning_type. */
1294
1295 tree
1296 create_field_decl (tree field_name, tree field_type, tree record_type,
1297 int packed, tree size, tree pos, int addressable)
1298 {
1299 tree field_decl = build_decl (FIELD_DECL, field_name, field_type);
1300
1301 DECL_CONTEXT (field_decl) = record_type;
1302 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
1303
1304 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1305 byte boundary since GCC cannot handle less-aligned BLKmode bitfields. */
1306 if (packed && TYPE_MODE (field_type) == BLKmode)
1307 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
1308
1309 /* If a size is specified, use it. Otherwise, if the record type is packed
1310 compute a size to use, which may differ from the object's natural size.
1311 We always set a size in this case to trigger the checks for bitfield
1312 creation below, which is typically required when no position has been
1313 specified. */
1314 if (size)
1315 size = convert (bitsizetype, size);
1316 else if (packed == 1)
1317 {
1318 size = rm_size (field_type);
1319
1320 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1321 byte. */
1322 if (TREE_CODE (size) == INTEGER_CST
1323 && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
1324 size = round_up (size, BITS_PER_UNIT);
1325 }
1326
1327 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1328 specified, for two reasons: first, if the size differs from the natural
1329 size; second, if the alignment is insufficient. There are a number of
1330 ways the latter can be true.
1331
1332 We never make a bitfield if the type of the field has a nonconstant size,
1333 because no such entity requiring bitfield operations should reach here.
1334
1335 We do *preventively* make a bitfield when there might be the need for it
1336 but we don't have all the necessary information to decide, as is the case
1337 of a field with no specified position in a packed record.
1338
1339 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1340 in layout_decl or finish_record_type to clear the bit_field indication if
1341 it is in fact not needed. */
1342 if (addressable >= 0
1343 && size
1344 && TREE_CODE (size) == INTEGER_CST
1345 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
1346 && (!operand_equal_p (TYPE_SIZE (field_type), size, 0)
1347 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
1348 || packed
1349 || (TYPE_ALIGN (record_type) != 0
1350 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
1351 {
1352 DECL_BIT_FIELD (field_decl) = 1;
1353 DECL_SIZE (field_decl) = size;
1354 if (!packed && !pos)
1355 DECL_ALIGN (field_decl)
1356 = (TYPE_ALIGN (record_type) != 0
1357 ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
1358 : TYPE_ALIGN (field_type));
1359 }
1360
1361 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
1362 DECL_ALIGN (field_decl)
1363 = MAX (DECL_ALIGN (field_decl),
1364 DECL_BIT_FIELD (field_decl) ? 1
1365 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
1366 : TYPE_ALIGN (field_type));
1367
1368 if (pos)
1369 {
1370 /* We need to pass in the alignment the DECL is known to have.
1371 This is the lowest-order bit set in POS, but no more than
1372 the alignment of the record, if one is specified. Note
1373 that an alignment of 0 is taken as infinite. */
1374 unsigned int known_align;
1375
1376 if (host_integerp (pos, 1))
1377 known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
1378 else
1379 known_align = BITS_PER_UNIT;
1380
1381 if (TYPE_ALIGN (record_type)
1382 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
1383 known_align = TYPE_ALIGN (record_type);
1384
1385 layout_decl (field_decl, known_align);
1386 SET_DECL_OFFSET_ALIGN (field_decl,
1387 host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
1388 : BITS_PER_UNIT);
1389 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
1390 &DECL_FIELD_BIT_OFFSET (field_decl),
1391 DECL_OFFSET_ALIGN (field_decl), pos);
1392
1393 DECL_HAS_REP_P (field_decl) = 1;
1394 }
1395
1396 /* If the field type is passed by reference, we will have pointers to the
1397 field, so it is addressable. */
1398 if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
1399 addressable = 1;
1400
1401 /* ??? For now, we say that any field of aggregate type is addressable
1402 because the front end may take 'Reference of it. */
1403 if (AGGREGATE_TYPE_P (field_type))
1404 addressable = 1;
1405
1406 /* Mark the decl as nonaddressable if it is indicated so semantically,
1407 meaning we won't ever attempt to take the address of the field.
1408
1409 It may also be "technically" nonaddressable, meaning that even if we
1410 attempt to take the field's address we will actually get the address of a
1411 copy. This is the case for true bitfields, but the DECL_BIT_FIELD value
1412 we have at this point is not accurate enough, so we don't account for
1413 this here and let finish_record_type decide. */
1414 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
1415
1416 return field_decl;
1417 }
1418
1419 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1420 effects, has the value of zero. */
1421
1422 static bool
1423 value_zerop (tree exp)
1424 {
1425 if (TREE_CODE (exp) == COMPOUND_EXPR)
1426 return value_zerop (TREE_OPERAND (exp, 1));
1427
1428 return integer_zerop (exp);
1429 }
1430 \f
1431 /* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1432 PARAM_TYPE is its type. READONLY is true if the parameter is
1433 readonly (either an IN parameter or an address of a pass-by-ref
1434 parameter). */
1435
1436 tree
1437 create_param_decl (tree param_name, tree param_type, bool readonly)
1438 {
1439 tree param_decl = build_decl (PARM_DECL, param_name, param_type);
1440
1441 /* Honor targetm.calls.promote_prototypes(), as not doing so can
1442 lead to various ABI violations. */
1443 if (targetm.calls.promote_prototypes (param_type)
1444 && (TREE_CODE (param_type) == INTEGER_TYPE
1445 || TREE_CODE (param_type) == ENUMERAL_TYPE)
1446 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1447 {
1448 /* We have to be careful about biased types here. Make a subtype
1449 of integer_type_node with the proper biasing. */
1450 if (TREE_CODE (param_type) == INTEGER_TYPE
1451 && TYPE_BIASED_REPRESENTATION_P (param_type))
1452 {
1453 param_type
1454 = copy_type (build_range_type (integer_type_node,
1455 TYPE_MIN_VALUE (param_type),
1456 TYPE_MAX_VALUE (param_type)));
1457
1458 TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
1459 }
1460 else
1461 param_type = integer_type_node;
1462 }
1463
1464 DECL_ARG_TYPE (param_decl) = param_type;
1465 DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
1466 TREE_READONLY (param_decl) = readonly;
1467 return param_decl;
1468 }
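
/* Annotation (not part of the original source): on targets whose
   promote_prototypes hook returns true, an 8-bit formal of an integer or
   enumeration type keeps its original TREE_TYPE, but the type recorded as
   DECL_ARG_TYPE (the one used by the calling conventions) is widened above
   to integer_type_node; for a biased integer type the widened subtype keeps
   TYPE_BIASED_REPRESENTATION_P so the biased representation of the original
   type is preserved.  */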
1469 \f
1470 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1471
1472 void
1473 process_attributes (tree decl, struct attrib *attr_list)
1474 {
1475 for (; attr_list; attr_list = attr_list->next)
1476 switch (attr_list->type)
1477 {
1478 case ATTR_MACHINE_ATTRIBUTE:
1479 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1480 NULL_TREE),
1481 ATTR_FLAG_TYPE_IN_PLACE);
1482 break;
1483
1484 case ATTR_LINK_ALIAS:
1485 TREE_STATIC (decl) = 1;
1486 assemble_alias (decl, attr_list->name);
1487 break;
1488
1489 case ATTR_WEAK_EXTERNAL:
1490 if (SUPPORTS_WEAK)
1491 declare_weak (decl);
1492 else
1493 post_error ("?weak declarations not supported on this target",
1494 attr_list->error_point);
1495 break;
1496
1497 case ATTR_LINK_SECTION:
1498 if (targetm.have_named_sections)
1499 {
1500 DECL_SECTION_NAME (decl)
1501 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1502 IDENTIFIER_POINTER (attr_list->name));
1503 DECL_COMMON (decl) = 0;
1504 }
1505 else
1506 post_error ("?section attributes are not supported for this target",
1507 attr_list->error_point);
1508 break;
1509 }
1510 }
1511 \f
1512 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
1513 a power of 2. */
1514
1515 static bool
1516 value_factor_p (tree value, HOST_WIDE_INT factor)
1517 {
1518 if (host_integerp (value, 1))
1519 return tree_low_cst (value, 1) % factor == 0;
1520
1521 if (TREE_CODE (value) == MULT_EXPR)
1522 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1523 || value_factor_p (TREE_OPERAND (value, 1), factor));
1524
1525 return 0;
1526 }
1527
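/* Worked example (annotation, not part of the original source):
   value_factor_p (bitsize_int (24), 8) is true since 24 % 8 == 0, and for a
   MULT_EXPR such as 16 * N it is true for factor 8 because one operand is
   itself a multiple of 8. The test is conservative: it returns false for
   expressions it cannot analyze, e.g. a bare variable N, even if N happens
   to be a multiple of the factor at run time.  */
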
1528 /* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1529 unless we can prove these 2 fields are laid out in such a way that no gap
1530 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
1531 is the distance in bits between the end of PREV_FIELD and the starting
1532 position of CURR_FIELD. It is ignored if null. */
1533
1534 static bool
1535 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
1536 {
1537 /* If this is the first field of the record, there cannot be any gap. */
1538 if (!prev_field)
1539 return false;
1540
1541 /* If the previous field is a union type, then return False: The only
1542 time when such a field is not the last field of the record is when
1543 there are other components at fixed positions after it (meaning there
1544 was a rep clause for every field), in which case we don't want the
1545 alignment constraint to override them. */
1546 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
1547 return false;
1548
1549 /* If the distance between the end of prev_field and the beginning of
1550 curr_field is constant, then there is a gap if the value of this
1551 constant is nonzero. */
1552 if (offset && host_integerp (offset, 1))
1553 return !integer_zerop (offset);
1554
1555 /* If the size and position of the previous field are constant,
1556 then check the sum of this size and position. There will be a gap
1557 iff it is not a multiple of the current field alignment. */
1558 if (host_integerp (DECL_SIZE (prev_field), 1)
1559 && host_integerp (bit_position (prev_field), 1))
1560 return ((tree_low_cst (bit_position (prev_field), 1)
1561 + tree_low_cst (DECL_SIZE (prev_field), 1))
1562 % DECL_ALIGN (curr_field) != 0);
1563
1564 /* If both the position and size of the previous field are multiples
1565 of the current field alignment, there cannot be any gap. */
1566 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
1567 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
1568 return false;
1569
1570 /* Fallback: return that there may be a potential gap. */
1571 return true;
1572 }
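/* Rough worked example with hypothetical fields: if PREV_FIELD starts at bit
   position 0 with a constant DECL_SIZE of 24 bits and CURR_FIELD has a
   DECL_ALIGN of 32, then (0 + 24) % 32 != 0 and a potential gap is reported;
   with a DECL_SIZE of 32 instead, the sum is a multiple of the alignment and
   no gap is reported.  */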
1573
1574 /* Returns a LABEL_DECL node for LABEL_NAME. */
1575
1576 tree
1577 create_label_decl (tree label_name)
1578 {
1579 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1580
1581 DECL_CONTEXT (label_decl) = current_function_decl;
1582 DECL_MODE (label_decl) = VOIDmode;
1583 DECL_SOURCE_LOCATION (label_decl) = input_location;
1584
1585 return label_decl;
1586 }
1587 \f
1588 /* Returns a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
1589 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
1590 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
1591 PARM_DECL nodes chained through the TREE_CHAIN field).
1592
1593 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
1594 appropriate fields in the FUNCTION_DECL. GNAT_NODE gives the location. */
1595
1596 tree
1597 create_subprog_decl (tree subprog_name, tree asm_name,
1598 tree subprog_type, tree param_decl_list, bool inline_flag,
1599 bool public_flag, bool extern_flag,
1600 struct attrib *attr_list, Node_Id gnat_node)
1601 {
1602 tree return_type = TREE_TYPE (subprog_type);
1603 tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);
1604
1605 /* If this is a function nested inside an inlined external function, it
1606 means we aren't going to compile the outer function unless it is
1607 actually inlined, so do the same for us. */
1608 if (current_function_decl && DECL_INLINE (current_function_decl)
1609 && DECL_EXTERNAL (current_function_decl))
1610 extern_flag = true;
1611
1612 DECL_EXTERNAL (subprog_decl) = extern_flag;
1613 TREE_PUBLIC (subprog_decl) = public_flag;
1614 TREE_STATIC (subprog_decl) = 1;
1615 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
1616 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
1617 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
1618 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
1619 DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
1620 DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
1621 DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;
1622
1623 if (inline_flag)
1624 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
1625
1626 if (asm_name)
1627 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
1628
1629 process_attributes (subprog_decl, attr_list);
1630
1631 /* Add this decl to the current binding level. */
1632 gnat_pushdecl (subprog_decl, gnat_node);
1633
1634 /* Output the assembler code and/or RTL for the declaration. */
1635 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
1636
1637 return subprog_decl;
1638 }
1639 \f
1640 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1641 body. This routine needs to be invoked before processing the declarations
1642 appearing in the subprogram. */
1643
1644 void
1645 begin_subprog_body (tree subprog_decl)
1646 {
1647 tree param_decl;
1648
1649 current_function_decl = subprog_decl;
1650 announce_function (subprog_decl);
1651
1652 /* Enter a new binding level and show that all the parameters belong to
1653 this function. */
1654 gnat_pushlevel ();
1655 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1656 param_decl = TREE_CHAIN (param_decl))
1657 DECL_CONTEXT (param_decl) = subprog_decl;
1658
1659 make_decl_rtl (subprog_decl);
1660
1661 /* We handle pending sizes via the elaboration of types, so we don't need to
1662 save them. This causes them to be marked as part of the outer function
1663 and then discarded. */
1664 get_pending_sizes ();
1665 }
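/* Sketch of the expected pairing elsewhere in gigi (identifiers are
   illustrative only):
     begin_subprog_body (gnu_subprog_decl);
     ... elaborate the declarations and statements into GNU_BODY ...
     end_subprog_body (gnu_body);  */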
1666
1667 /* Finish the definition of the current subprogram and compile it all the way
1668 to assembler language output. BODY is the tree corresponding to
1669 the subprogram. */
1670
1671 void
1672 end_subprog_body (tree body)
1673 {
1674 tree fndecl = current_function_decl;
1675
1676 /* Mark the BLOCK for this level as being for this function and pop the
1677 level. Since the vars in it are the parameters, clear them. */
1678 BLOCK_VARS (current_binding_level->block) = 0;
1679 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
1680 DECL_INITIAL (fndecl) = current_binding_level->block;
1681 gnat_poplevel ();
1682
1683 /* Deal with inline. If declared inline or we should default to inline,
1684 set the flag in the decl. */
1685 DECL_INLINE (fndecl)
1686 = DECL_DECLARED_INLINE_P (fndecl) || flag_inline_trees == 2;
1687
1688 /* We handle pending sizes via the elaboration of types, so we don't
1689 need to save them. */
1690 get_pending_sizes ();
1691
1692 /* Mark the RESULT_DECL as being in this subprogram. */
1693 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
1694
1695 DECL_SAVED_TREE (fndecl) = body;
1696
1697 current_function_decl = DECL_CONTEXT (fndecl);
1698 cfun = NULL;
1699
1700 /* If we're only annotating types, don't actually compile this function. */
1701 if (type_annotate_only)
1702 return;
1703
1704 /* We do different things for nested and non-nested functions.
1705 ??? This should be in cgraph. */
1706 if (!DECL_CONTEXT (fndecl))
1707 {
1708 gnat_gimplify_function (fndecl);
1709 cgraph_finalize_function (fndecl, false);
1710 }
1711 else
1712 /* Register this function with cgraph just far enough to get it
1713 added to our parent's nested function list. */
1714 (void) cgraph_node (fndecl);
1715 }
1716
1717 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
1718
1719 static void
1720 gnat_gimplify_function (tree fndecl)
1721 {
1722 struct cgraph_node *cgn;
1723
1724 dump_function (TDI_original, fndecl);
1725 gimplify_function_tree (fndecl);
1726 dump_function (TDI_generic, fndecl);
1727
1728 /* Convert all nested functions to GIMPLE now. We do things in this order
1729 so that items like VLA sizes are expanded properly in the context of the
1730 correct function. */
1731 cgn = cgraph_node (fndecl);
1732 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1733 gnat_gimplify_function (cgn->decl);
1734 }
1735 \f
1736 /* Return a definition for a builtin function named NAME and whose data type
1737 is TYPE. TYPE should be a function type with argument types.
1738 FUNCTION_CODE tells later passes how to compile calls to this function.
1739 See tree.h for its possible values.
1740
1741 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1742 the name to be called if we can't opencode the function. If
1743 ATTRS is nonzero, use that for the function attribute list. */
1744
1745 tree
1746 builtin_function (const char *name, tree type, int function_code,
1747 enum built_in_class class, const char *library_name,
1748 tree attrs)
1749 {
1750 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1751
1752 DECL_EXTERNAL (decl) = 1;
1753 TREE_PUBLIC (decl) = 1;
1754 if (library_name)
1755 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1756
1757 gnat_pushdecl (decl, Empty);
1758 DECL_BUILT_IN_CLASS (decl) = class;
1759 DECL_FUNCTION_CODE (decl) = function_code;
1760 if (attrs)
1761 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1762 return decl;
1763 }
1764
1765 /* Return an integer type with the number of bits of precision given by
1766 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1767 it is a signed type. */
1768
1769 tree
1770 gnat_type_for_size (unsigned precision, int unsignedp)
1771 {
1772 tree t;
1773 char type_name[20];
1774
1775 if (precision <= 2 * MAX_BITS_PER_WORD
1776 && signed_and_unsigned_types[precision][unsignedp])
1777 return signed_and_unsigned_types[precision][unsignedp];
1778
1779 if (unsignedp)
1780 t = make_unsigned_type (precision);
1781 else
1782 t = make_signed_type (precision);
1783
1784 if (precision <= 2 * MAX_BITS_PER_WORD)
1785 signed_and_unsigned_types[precision][unsignedp] = t;
1786
1787 if (!TYPE_NAME (t))
1788 {
1789 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1790 TYPE_NAME (t) = get_identifier (type_name);
1791 }
1792
1793 return t;
1794 }
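/* For example, gnat_type_for_size (8, 1) builds and caches an unsigned 8-bit
   integer type which, lacking a prior TYPE_NAME, is named "UNSIGNED_8"; a
   later call with the same arguments returns the same node from
   signed_and_unsigned_types.  */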
1795
1796 /* Likewise for floating-point types. */
1797
1798 static tree
1799 float_type_for_precision (int precision, enum machine_mode mode)
1800 {
1801 tree t;
1802 char type_name[20];
1803
1804 if (float_types[(int) mode])
1805 return float_types[(int) mode];
1806
1807 float_types[(int) mode] = t = make_node (REAL_TYPE);
1808 TYPE_PRECISION (t) = precision;
1809 layout_type (t);
1810
1811 gcc_assert (TYPE_MODE (t) == mode);
1812 if (!TYPE_NAME (t))
1813 {
1814 sprintf (type_name, "FLOAT_%d", precision);
1815 TYPE_NAME (t) = get_identifier (type_name);
1816 }
1817
1818 return t;
1819 }
1820
1821 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1822 an unsigned type; otherwise a signed type is returned. */
1823
1824 tree
1825 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1826 {
1827 if (mode == BLKmode)
1828 return NULL_TREE;
1829 else if (mode == VOIDmode)
1830 return void_type_node;
1831 else if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1832 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
1833 else
1834 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
1835 }
1836
1837 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
1838
1839 tree
1840 gnat_unsigned_type (tree type_node)
1841 {
1842 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
1843
1844 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1845 {
1846 type = copy_node (type);
1847 TREE_TYPE (type) = type_node;
1848 }
1849 else if (TREE_TYPE (type_node)
1850 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1851 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1852 {
1853 type = copy_node (type);
1854 TREE_TYPE (type) = TREE_TYPE (type_node);
1855 }
1856
1857 return type;
1858 }
1859
1860 /* Return the signed version of a TYPE_NODE, a scalar type. */
1861
1862 tree
1863 gnat_signed_type (tree type_node)
1864 {
1865 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
1866
1867 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
1868 {
1869 type = copy_node (type);
1870 TREE_TYPE (type) = type_node;
1871 }
1872 else if (TREE_TYPE (type_node)
1873 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
1874 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
1875 {
1876 type = copy_node (type);
1877 TREE_TYPE (type) = TREE_TYPE (type_node);
1878 }
1879
1880 return type;
1881 }
1882
1883 /* Return a type the same as TYPE except unsigned or signed according to
1884 UNSIGNEDP. */
1885
1886 tree
1887 gnat_signed_or_unsigned_type (int unsignedp, tree type)
1888 {
1889 if (!INTEGRAL_TYPE_P (type) || TYPE_UNSIGNED (type) == unsignedp)
1890 return type;
1891 else
1892 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
1893 }
1894 \f
1895 /* EXP is an expression for the size of an object. If this size contains
1896 discriminant references, replace them with the maximum (if MAX_P) or
1897 minimum (if !MAX_P) possible value of the discriminant. */
1898
1899 tree
1900 max_size (tree exp, bool max_p)
1901 {
1902 enum tree_code code = TREE_CODE (exp);
1903 tree type = TREE_TYPE (exp);
1904
1905 switch (TREE_CODE_CLASS (code))
1906 {
1907 case tcc_declaration:
1908 case tcc_constant:
1909 return exp;
1910
1911 case tcc_exceptional:
1912 if (code == TREE_LIST)
1913 return tree_cons (TREE_PURPOSE (exp),
1914 max_size (TREE_VALUE (exp), max_p),
1915 TREE_CHAIN (exp)
1916 ? max_size (TREE_CHAIN (exp), max_p) : NULL_TREE);
1917 break;
1918
1919 case tcc_reference:
1920 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
1921 modify. Otherwise, we treat it like a variable. */
1922 if (!CONTAINS_PLACEHOLDER_P (exp))
1923 return exp;
1924
1925 type = TREE_TYPE (TREE_OPERAND (exp, 1));
1926 return
1927 max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);
1928
1929 case tcc_comparison:
1930 return max_p ? size_one_node : size_zero_node;
1931
1932 case tcc_unary:
1933 case tcc_binary:
1934 case tcc_expression:
1935 switch (TREE_CODE_LENGTH (code))
1936 {
1937 case 1:
1938 if (code == NON_LVALUE_EXPR)
1939 return max_size (TREE_OPERAND (exp, 0), max_p);
1940 else
1941 return
1942 fold (build1 (code, type,
1943 max_size (TREE_OPERAND (exp, 0),
1944 code == NEGATE_EXPR ? !max_p : max_p)));
1945
1946 case 2:
1947 if (code == COMPOUND_EXPR)
1948 return max_size (TREE_OPERAND (exp, 1), max_p);
1949
1950 {
1951 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
1952 tree rhs = max_size (TREE_OPERAND (exp, 1),
1953 code == MINUS_EXPR ? !max_p : max_p);
1954
1955 /* Special-case wanting the maximum value of a MIN_EXPR.
1956 In that case, if one side overflows, return the other.
1957 sizetype is signed, but we know sizes are non-negative.
1958 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
1959 overflowing or the maximum possible value and the RHS
1960 a variable. */
1961 if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
1962 return lhs;
1963 else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
1964 return rhs;
1965 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1966 && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
1967 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
1968 && !TREE_CONSTANT (rhs))
1969 return lhs;
1970 else
1971 return fold (build2 (code, type, lhs, rhs));
1972 }
1973
1974 case 3:
1975 if (code == SAVE_EXPR)
1976 return exp;
1977 else if (code == COND_EXPR)
1978 return fold (build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
1979 max_size (TREE_OPERAND (exp, 1), max_p),
1980 max_size (TREE_OPERAND (exp, 2), max_p)));
1981 else if (code == CALL_EXPR && TREE_OPERAND (exp, 1))
1982 return build3 (CALL_EXPR, type, TREE_OPERAND (exp, 0),
1983 max_size (TREE_OPERAND (exp, 1), max_p), NULL);
1984 }
1985
1986 /* Other tree classes cannot happen. */
1987 default:
1988 break;
1989 }
1990
1991 gcc_unreachable ();
1992 }
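/* Hypothetical example: for a size expression (D - 1) * 4 in which D is a
   discriminant reference whose subtype ranges over 1 .. 10, the reference is
   replaced by 10 (resp. 1) and the whole expression folds to 36 when MAX_P
   is true and to 0 when it is false.  */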
1993 \f
1994 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
1995 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
1996 Return a constructor for the template. */
1997
1998 tree
1999 build_template (tree template_type, tree array_type, tree expr)
2000 {
2001 tree template_elts = NULL_TREE;
2002 tree bound_list = NULL_TREE;
2003 tree field;
2004
2005 if (TREE_CODE (array_type) == RECORD_TYPE
2006 && (TYPE_IS_PADDING_P (array_type)
2007 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
2008 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
2009
2010 if (TREE_CODE (array_type) == ARRAY_TYPE
2011 || (TREE_CODE (array_type) == INTEGER_TYPE
2012 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
2013 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
2014
2015 /* First make the list for a CONSTRUCTOR for the template. Go down the
2016 field list of the template instead of the type chain because this
2017 array might be an Ada array of arrays and we can't tell where the
2018 nested arrays stop being the underlying object. */
2019
2020 for (field = TYPE_FIELDS (template_type); field;
2021 (bound_list
2022 ? (bound_list = TREE_CHAIN (bound_list))
2023 : (array_type = TREE_TYPE (array_type))),
2024 field = TREE_CHAIN (TREE_CHAIN (field)))
2025 {
2026 tree bounds, min, max;
2027
2028 /* If we have a bound list, get the bounds from there. Likewise
2029 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2030 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2031 This will give us a maximum range. */
2032 if (bound_list)
2033 bounds = TREE_VALUE (bound_list);
2034 else if (TREE_CODE (array_type) == ARRAY_TYPE)
2035 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
2036 else if (expr && TREE_CODE (expr) == PARM_DECL
2037 && DECL_BY_COMPONENT_PTR_P (expr))
2038 bounds = TREE_TYPE (field);
2039 else
2040 gcc_unreachable ();
2041
2042 min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
2043 max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));
2044
2045 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2046 substitute it from OBJECT. */
2047 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
2048 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
2049
2050 template_elts = tree_cons (TREE_CHAIN (field), max,
2051 tree_cons (field, min, template_elts));
2052 }
2053
2054 return gnat_build_constructor (template_type, nreverse (template_elts));
2055 }
2056 \f
2057 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2058 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2059 in the type contains in its DECL_INITIAL the expression to use when
2060 a constructor is made for the type. GNAT_ENTITY is an entity used
2061 to print out an error message if the mechanism cannot be applied to
2062 an object of that type and also for the name. */
2063
2064 tree
2065 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2066 {
2067 tree record_type = make_node (RECORD_TYPE);
2068 tree field_list = 0;
2069 int class;
2070 int dtype = 0;
2071 tree inner_type;
2072 int ndim;
2073 int i;
2074 tree *idx_arr;
2075 tree tem;
2076
2077 /* If TYPE is an unconstrained array, use the underlying array type. */
2078 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2079 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2080
2081 /* If this is an array, compute the number of dimensions in the array,
2082 get the index types, and point to the inner type. */
2083 if (TREE_CODE (type) != ARRAY_TYPE)
2084 ndim = 0;
2085 else
2086 for (ndim = 1, inner_type = type;
2087 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2088 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2089 ndim++, inner_type = TREE_TYPE (inner_type))
2090 ;
2091
2092 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2093
2094 if (mech != By_Descriptor_NCA
2095 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2096 for (i = ndim - 1, inner_type = type;
2097 i >= 0;
2098 i--, inner_type = TREE_TYPE (inner_type))
2099 idx_arr[i] = TYPE_DOMAIN (inner_type);
2100 else
2101 for (i = 0, inner_type = type;
2102 i < ndim;
2103 i++, inner_type = TREE_TYPE (inner_type))
2104 idx_arr[i] = TYPE_DOMAIN (inner_type);
2105
2106 /* Now get the DTYPE value. */
2107 switch (TREE_CODE (type))
2108 {
2109 case INTEGER_TYPE:
2110 case ENUMERAL_TYPE:
2111 if (TYPE_VAX_FLOATING_POINT_P (type))
2112 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2113 {
2114 case 6:
2115 dtype = 10;
2116 break;
2117 case 9:
2118 dtype = 11;
2119 break;
2120 case 15:
2121 dtype = 27;
2122 break;
2123 }
2124 else
2125 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2126 {
2127 case 8:
2128 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2129 break;
2130 case 16:
2131 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2132 break;
2133 case 32:
2134 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2135 break;
2136 case 64:
2137 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2138 break;
2139 case 128:
2140 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2141 break;
2142 }
2143 break;
2144
2145 case REAL_TYPE:
2146 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2147 break;
2148
2149 case COMPLEX_TYPE:
2150 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2151 && TYPE_VAX_FLOATING_POINT_P (type))
2152 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2153 {
2154 case 6:
2155 dtype = 12;
2156 break;
2157 case 9:
2158 dtype = 13;
2159 break;
2160 case 15:
2161 dtype = 29;
2162 }
2163 else
2164 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2165 break;
2166
2167 case ARRAY_TYPE:
2168 dtype = 14;
2169 break;
2170
2171 default:
2172 break;
2173 }
2174
2175 /* Get the CLASS value. */
2176 switch (mech)
2177 {
2178 case By_Descriptor_A:
2179 class = 4;
2180 break;
2181 case By_Descriptor_NCA:
2182 class = 10;
2183 break;
2184 case By_Descriptor_SB:
2185 class = 15;
2186 break;
2187 default:
2188 class = 1;
2189 }
2190
2191 /* Make the type for a descriptor for VMS. The first four fields
2192 are the same for all types. */
2193
2194 field_list
2195 = chainon (field_list,
2196 make_descriptor_field
2197 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2198 size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));
2199
2200 field_list = chainon (field_list,
2201 make_descriptor_field ("DTYPE",
2202 gnat_type_for_size (8, 1),
2203 record_type, size_int (dtype)));
2204 field_list = chainon (field_list,
2205 make_descriptor_field ("CLASS",
2206 gnat_type_for_size (8, 1),
2207 record_type, size_int (class)));
2208
2209 field_list
2210 = chainon (field_list,
2211 make_descriptor_field
2212 ("POINTER",
2213 build_pointer_type_for_mode (type, SImode, false), record_type,
2214 build1 (ADDR_EXPR,
2215 build_pointer_type_for_mode (type, SImode, false),
2216 build0 (PLACEHOLDER_EXPR, type))));
2217
2218 switch (mech)
2219 {
2220 case By_Descriptor:
2221 case By_Descriptor_S:
2222 break;
2223
2224 case By_Descriptor_SB:
2225 field_list
2226 = chainon (field_list,
2227 make_descriptor_field
2228 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2229 TREE_CODE (type) == ARRAY_TYPE
2230 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2231 field_list
2232 = chainon (field_list,
2233 make_descriptor_field
2234 ("SB_L2", gnat_type_for_size (32, 1), record_type,
2235 TREE_CODE (type) == ARRAY_TYPE
2236 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2237 break;
2238
2239 case By_Descriptor_A:
2240 case By_Descriptor_NCA:
2241 field_list = chainon (field_list,
2242 make_descriptor_field ("SCALE",
2243 gnat_type_for_size (8, 1),
2244 record_type,
2245 size_zero_node));
2246
2247 field_list = chainon (field_list,
2248 make_descriptor_field ("DIGITS",
2249 gnat_type_for_size (8, 1),
2250 record_type,
2251 size_zero_node));
2252
2253 field_list
2254 = chainon (field_list,
2255 make_descriptor_field
2256 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2257 size_int (mech == By_Descriptor_NCA
2258 ? 0
2259 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2260 : (TREE_CODE (type) == ARRAY_TYPE
2261 && TYPE_CONVENTION_FORTRAN_P (type)
2262 ? 224 : 192))));
2263
2264 field_list = chainon (field_list,
2265 make_descriptor_field ("DIMCT",
2266 gnat_type_for_size (8, 1),
2267 record_type,
2268 size_int (ndim)));
2269
2270 field_list = chainon (field_list,
2271 make_descriptor_field ("ARSIZE",
2272 gnat_type_for_size (32, 1),
2273 record_type,
2274 size_in_bytes (type)));
2275
2276 /* Now build a pointer to the 0,0,0... element. */
2277 tem = build0 (PLACEHOLDER_EXPR, type);
2278 for (i = 0, inner_type = type; i < ndim;
2279 i++, inner_type = TREE_TYPE (inner_type))
2280 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2281 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2282 NULL_TREE, NULL_TREE);
2283
2284 field_list
2285 = chainon (field_list,
2286 make_descriptor_field
2287 ("A0",
2288 build_pointer_type_for_mode (inner_type, SImode, false),
2289 record_type,
2290 build1 (ADDR_EXPR,
2291 build_pointer_type_for_mode (inner_type, SImode,
2292 false),
2293 tem)));
2294
2295 /* Next come the addressing coefficients. */
2296 tem = size_int (1);
2297 for (i = 0; i < ndim; i++)
2298 {
2299 char fname[3];
2300 tree idx_length
2301 = size_binop (MULT_EXPR, tem,
2302 size_binop (PLUS_EXPR,
2303 size_binop (MINUS_EXPR,
2304 TYPE_MAX_VALUE (idx_arr[i]),
2305 TYPE_MIN_VALUE (idx_arr[i])),
2306 size_int (1)));
2307
2308 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2309 fname[1] = '0' + i, fname[2] = 0;
2310 field_list
2311 = chainon (field_list,
2312 make_descriptor_field (fname,
2313 gnat_type_for_size (32, 1),
2314 record_type, idx_length));
2315
2316 if (mech == By_Descriptor_NCA)
2317 tem = idx_length;
2318 }
2319
2320 /* Finally here are the bounds. */
2321 for (i = 0; i < ndim; i++)
2322 {
2323 char fname[3];
2324
2325 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2326 field_list
2327 = chainon (field_list,
2328 make_descriptor_field
2329 (fname, gnat_type_for_size (32, 1), record_type,
2330 TYPE_MIN_VALUE (idx_arr[i])));
2331
2332 fname[0] = 'U';
2333 field_list
2334 = chainon (field_list,
2335 make_descriptor_field
2336 (fname, gnat_type_for_size (32, 1), record_type,
2337 TYPE_MAX_VALUE (idx_arr[i])));
2338 }
2339 break;
2340
2341 default:
2342 post_error ("unsupported descriptor type for &", gnat_entity);
2343 }
2344
2345 finish_record_type (record_type, field_list, false, true);
2346 create_type_decl (create_concat_name (gnat_entity, "DESC"), record_type,
2347 NULL, true, false, gnat_entity);
2348
2349 return record_type;
2350 }
2351
2352 /* Utility routine for above code to make a field. */
2353
2354 static tree
2355 make_descriptor_field (const char *name, tree type,
2356 tree rec_type, tree initial)
2357 {
2358 tree field
2359 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2360
2361 DECL_INITIAL (field) = initial;
2362 return field;
2363 }
2364 \f
2365 /* Build a type to be used to represent an aliased object whose nominal
2366 type is an unconstrained array. This consists of a RECORD_TYPE containing
2367 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2368 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2369 is used to represent an arbitrary unconstrained object. Use NAME
2370 as the name of the record. */
2371
2372 tree
2373 build_unc_object_type (tree template_type, tree object_type, tree name)
2374 {
2375 tree type = make_node (RECORD_TYPE);
2376 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2377 template_type, type, 0, 0, 0, 1);
2378 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2379 type, 0, 0, 0, 1);
2380
2381 TYPE_NAME (type) = name;
2382 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2383 finish_record_type (type,
2384 chainon (chainon (NULL_TREE, template_field),
2385 array_field),
2386 false, false);
2387
2388 return type;
2389 }
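/* The resulting layout is simply a two-field record, conceptually
   "record BOUNDS : TEMPLATE_TYPE; ARRAY : OBJECT_TYPE; end record", so the
   bounds precede the array data in the object.  */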
2390 \f
2391 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2392 the normal case this is just two adjustments, but we have more to do
2393 if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
2394
2395 void
2396 update_pointer_to (tree old_type, tree new_type)
2397 {
2398 tree ptr = TYPE_POINTER_TO (old_type);
2399 tree ref = TYPE_REFERENCE_TO (old_type);
2400 tree ptr1, ref1;
2401 tree type;
2402
2403 /* If this is the main variant, process all the other variants first. */
2404 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2405 for (type = TYPE_NEXT_VARIANT (old_type); type;
2406 type = TYPE_NEXT_VARIANT (type))
2407 update_pointer_to (type, new_type);
2408
2409 /* If no pointer or reference, we are done. */
2410 if (!ptr && !ref)
2411 return;
2412
2413 /* Merge the old type qualifiers in the new type.
2414
2415 Each old variant has qualifiers for specific reasons, and the new
2416 designated type as well. Each set of qualifiers represents useful
2417 information grabbed at some point, and merging the two simply unifies
2418 these inputs into the final type description.
2419
2420 Consider for instance a volatile type frozen after an access to constant
2421 type designating it. After the designated type freeze, we get here with a
2422 volatile new_type and a dummy old_type with a readonly variant, created
2423 when the access type was processed. We shall make a volatile and readonly
2424 designated type, because that's what it really is.
2425
2426 We might also get here for a non-dummy old_type variant with different
2427 qualifiers than the new_type ones, for instance in some cases of pointers
2428 to private record type elaboration (see the comments around the call to
2429 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
2430 qualifiers in those cases too, to avoid accidentally discarding the
2431 initial set, and will often end up with old_type == new_type then. */
2432 new_type = build_qualified_type (new_type,
2433 TYPE_QUALS (old_type)
2434 | TYPE_QUALS (new_type));
2435
2436 /* If the new type and the old one are identical, there is nothing to
2437 update. */
2438 if (old_type == new_type)
2439 return;
2440
2441 /* Otherwise, first handle the simple case. */
2442 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2443 {
2444 TYPE_POINTER_TO (new_type) = ptr;
2445 TYPE_REFERENCE_TO (new_type) = ref;
2446
2447 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
2448 for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
2449 ptr1 = TYPE_NEXT_VARIANT (ptr1))
2450 {
2451 TREE_TYPE (ptr1) = new_type;
2452
2453 if (TYPE_NAME (ptr1)
2454 && TREE_CODE (TYPE_NAME (ptr1)) == TYPE_DECL
2455 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2456 rest_of_decl_compilation (TYPE_NAME (ptr1),
2457 global_bindings_p (), 0);
2458 }
2459
2460 for (; ref; ref = TYPE_NEXT_PTR_TO (ref))
2461 for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
2462 ref1 = TYPE_NEXT_VARIANT (ref1))
2463 {
2464 TREE_TYPE (ref1) = new_type;
2465
2466 if (TYPE_NAME (ref1)
2467 && TREE_CODE (TYPE_NAME (ref1)) == TYPE_DECL
2468 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2469 rest_of_decl_compilation (TYPE_NAME (ref1),
2470 global_bindings_p (), 0);
2471 }
2472 }
2473
2474 /* Now deal with the unconstrained array case. In this case the "pointer"
2475 is actually a RECORD_TYPE where the types of both fields are
2476 pointers to void. In that case, copy the field list from the
2477 old type to the new one and update the fields' context. */
2478 else if (TREE_CODE (ptr) != RECORD_TYPE || !TYPE_IS_FAT_POINTER_P (ptr))
2479 gcc_unreachable ();
2480
2481 else
2482 {
2483 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2484 tree ptr_temp_type;
2485 tree new_ref;
2486 tree var;
2487
2488 SET_DECL_ORIGINAL_FIELD (TYPE_FIELDS (ptr),
2489 TYPE_FIELDS (TYPE_POINTER_TO (new_type)));
2490 SET_DECL_ORIGINAL_FIELD (TREE_CHAIN (TYPE_FIELDS (ptr)),
2491 TREE_CHAIN (TYPE_FIELDS
2492 (TYPE_POINTER_TO (new_type))));
2493
2494 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2495 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2496 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2497
2498 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2499 template bounds.
2500
2501 ??? This is now the only use of gnat_substitute_in_type, which
2502 is now a very "heavy" routine to do this, so it should be replaced
2503 at some point. */
2504 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2505 new_ref = build3 (COMPONENT_REF, ptr_temp_type,
2506 build0 (PLACEHOLDER_EXPR, ptr),
2507 TREE_CHAIN (TYPE_FIELDS (ptr)), NULL_TREE);
2508
2509 update_pointer_to
2510 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2511 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2512 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2513
2514 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
2515 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
2516
2517 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2518 = TREE_TYPE (new_type) = ptr;
2519
2520 /* Now handle updating the allocation record, what the thin pointer
2521 points to. Update all pointers from the old record into the new
2522 one, update the types of the fields, and recompute the size. */
2523
2524 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2525
2526 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2527 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2528 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2529 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2530 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2531 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2532 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2533
2534 TYPE_SIZE (new_obj_rec)
2535 = size_binop (PLUS_EXPR,
2536 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2537 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2538 TYPE_SIZE_UNIT (new_obj_rec)
2539 = size_binop (PLUS_EXPR,
2540 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2541 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2542 rest_of_type_compilation (ptr, global_bindings_p ());
2543 }
2544 }
2545 \f
2546 /* Convert a pointer to a constrained array into a pointer to a fat
2547 pointer. This involves making or finding a template. */
2548
2549 static tree
2550 convert_to_fat_pointer (tree type, tree expr)
2551 {
2552 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2553 tree template, template_addr;
2554 tree etype = TREE_TYPE (expr);
2555
2556 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2557 pointer to the template and array. */
2558 if (integer_zerop (expr))
2559 return
2560 gnat_build_constructor
2561 (type,
2562 tree_cons (TYPE_FIELDS (type),
2563 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2564 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2565 convert (build_pointer_type (template_type),
2566 expr),
2567 NULL_TREE)));
2568
2569 /* If EXPR is a thin pointer, make the template and data from the record. */
2570
2571 else if (TYPE_THIN_POINTER_P (etype))
2572 {
2573 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2574
2575 expr = save_expr (expr);
2576 if (TREE_CODE (expr) == ADDR_EXPR)
2577 expr = TREE_OPERAND (expr, 0);
2578 else
2579 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2580
2581 template = build_component_ref (expr, NULL_TREE, fields, false);
2582 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2583 build_component_ref (expr, NULL_TREE,
2584 TREE_CHAIN (fields), false));
2585 }
2586 else
2587 /* Otherwise, build the constructor for the template. */
2588 template = build_template (template_type, TREE_TYPE (etype), expr);
2589
2590 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2591
2592 /* The result is a CONSTRUCTOR for the fat pointer.
2593
2594 If expr is an argument of a foreign convention subprogram, the type it
2595 points to is directly the component type. In this case, the expression
2596 type may not match the corresponding FIELD_DECL type at this point, so we
2597 call "convert" here to fix that up if necessary. This type consistency is
2598 required, for instance because it ensures that possible later folding of
2599 component_refs against this constructor always yields something of the
2600 same type as the initial reference.
2601
2602 Note that the call to "build_template" above is still fine, because it
2603 will only refer to the provided template_type in this case. */
2604 return
2605 gnat_build_constructor
2606 (type, tree_cons (TYPE_FIELDS (type),
2607 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2608 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2609 template_addr, NULL_TREE)));
2610 }
2611 \f
2612 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2613 is something that is a fat pointer, so convert to it first if EXPR
2614 is not already a fat pointer. */
2615
2616 static tree
2617 convert_to_thin_pointer (tree type, tree expr)
2618 {
2619 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2620 expr
2621 = convert_to_fat_pointer
2622 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2623
2624 /* We get the pointer to the data and use a NOP_EXPR to make it the
2625 proper GCC type. */
2626 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
2627 false);
2628 expr = build1 (NOP_EXPR, type, expr);
2629
2630 return expr;
2631 }
2632 \f
2633 /* Create an expression whose value is that of EXPR,
2634 converted to type TYPE. The TREE_TYPE of the value
2635 is always TYPE. This function implements all reasonable
2636 conversions; callers should filter out those that are
2637 not permitted by the language being compiled. */
2638
2639 tree
2640 convert (tree type, tree expr)
2641 {
2642 enum tree_code code = TREE_CODE (type);
2643 tree etype = TREE_TYPE (expr);
2644 enum tree_code ecode = TREE_CODE (etype);
2645 tree tem;
2646
2647 /* If EXPR is already the right type, we are done. */
2648 if (type == etype)
2649 return expr;
2650
2651 /* If the input type has padding, remove it by doing a component reference
2652 to the field. If the output type has padding, make a constructor
2653 to build the record. If both input and output have padding and are
2654 of variable size, do this as an unchecked conversion. */
2655 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
2656 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2657 && (!TREE_CONSTANT (TYPE_SIZE (type))
2658 || !TREE_CONSTANT (TYPE_SIZE (etype))))
2659 ;
2660 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2661 {
2662 /* If we have just converted to this padded type, just get
2663 the inner expression. */
2664 if (TREE_CODE (expr) == CONSTRUCTOR
2665 && CONSTRUCTOR_ELTS (expr)
2666 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2667 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2668 else
2669 return convert (type,
2670 build_component_ref (expr, NULL_TREE,
2671 TYPE_FIELDS (etype), false));
2672 }
2673 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2674 {
2675 /* If we previously converted from another type and our type is
2676 of variable size, remove the conversion to avoid the need for
2677 variable-size temporaries. */
2678 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
2679 && !TREE_CONSTANT (TYPE_SIZE (type)))
2680 expr = TREE_OPERAND (expr, 0);
2681
2682 /* If we are just removing the padding from expr, convert the original
2683 object if we have variable size. That will avoid the need
2684 for some variable-size temporaries. */
2685 if (TREE_CODE (expr) == COMPONENT_REF
2686 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2687 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2688 && !TREE_CONSTANT (TYPE_SIZE (type)))
2689 return convert (type, TREE_OPERAND (expr, 0));
2690
2691 /* If the result type is a padded type with a self-referentially-sized
2692 field and the expression type is a record, do this as an
2693 unchecked conversion. */
2694 else if (TREE_CODE (etype) == RECORD_TYPE
2695 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
2696 return unchecked_convert (type, expr, false);
2697
2698 else
2699 return
2700 gnat_build_constructor (type,
2701 tree_cons (TYPE_FIELDS (type),
2702 convert (TREE_TYPE
2703 (TYPE_FIELDS (type)),
2704 expr),
2705 NULL_TREE));
2706 }
2707
2708 /* If the input is a biased type, adjust first. */
2709 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2710 return convert (type, fold (build2 (PLUS_EXPR, TREE_TYPE (etype),
2711 fold (build1 (NOP_EXPR,
2712 TREE_TYPE (etype),
2713 expr)),
2714 TYPE_MIN_VALUE (etype))));
2715
2716 /* If the input is a justified modular type, we need to extract
2717 the actual object before converting it to any other type with the
2718 exception of an unconstrained array. */
2719 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
2720 && code != UNCONSTRAINED_ARRAY_TYPE)
2721 return convert (type, build_component_ref (expr, NULL_TREE,
2722 TYPE_FIELDS (etype), false));
2723
2724 /* If converting to a type that contains a template, convert to the data
2725 type and then build the template. */
2726 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
2727 {
2728 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2729
2730 /* If the source already has a template, get a reference to the
2731 associated array only, as we are going to rebuild a template
2732 for the target type anyway. */
2733 expr = maybe_unconstrained_array (expr);
2734
2735 return
2736 gnat_build_constructor
2737 (type,
2738 tree_cons (TYPE_FIELDS (type),
2739 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2740 obj_type, NULL_TREE),
2741 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2742 convert (obj_type, expr), NULL_TREE)));
2743 }
2744
2745 /* There are some special cases of expressions that we process
2746 specially. */
2747 switch (TREE_CODE (expr))
2748 {
2749 case ERROR_MARK:
2750 return expr;
2751
2752 case NULL_EXPR:
2753 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2754 conversion in gnat_expand_expr. NULL_EXPR does not represent
2755 an actual value, so no conversion is needed. */
2756 expr = copy_node (expr);
2757 TREE_TYPE (expr) = type;
2758 return expr;
2759
2760 case STRING_CST:
2761 /* If we are converting a STRING_CST to another constrained array type,
2762 just make a new one in the proper type. */
2763 if (code == ecode && AGGREGATE_TYPE_P (etype)
2764 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
2765 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
2766 && (TREE_CODE (expr) == STRING_CST
2767 || get_alias_set (etype) == get_alias_set (type)))
2768 {
2769 expr = copy_node (expr);
2770 TREE_TYPE (expr) = type;
2771 return expr;
2772 }
2773 break;
2774
2775 case UNCONSTRAINED_ARRAY_REF:
2776 /* Convert this to the type of the inner array by getting the address of
2777 the array from the template. */
2778 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
2779 build_component_ref (TREE_OPERAND (expr, 0),
2780 get_identifier ("P_ARRAY"),
2781 NULL_TREE, false));
2782 etype = TREE_TYPE (expr);
2783 ecode = TREE_CODE (etype);
2784 break;
2785
2786 case VIEW_CONVERT_EXPR:
2787 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
2788 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
2789 return convert (type, TREE_OPERAND (expr, 0));
2790 break;
2791
2792 case INDIRECT_REF:
2793 /* If both types are record types, just convert the pointer and
2794 make a new INDIRECT_REF.
2795
2796 ??? Disable this for now since it causes problems with the
2797 code in build_binary_op for MODIFY_EXPR which wants to
2798 strip off conversions. But that code really is a mess and
2799 we need to do this a much better way some time. */
2800 if (0
2801 && (TREE_CODE (type) == RECORD_TYPE
2802 || TREE_CODE (type) == UNION_TYPE)
2803 && (TREE_CODE (etype) == RECORD_TYPE
2804 || TREE_CODE (etype) == UNION_TYPE)
2805 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
2806 return build_unary_op (INDIRECT_REF, NULL_TREE,
2807 convert (build_pointer_type (type),
2808 TREE_OPERAND (expr, 0)));
2809 break;
2810
2811 default:
2812 break;
2813 }
2814
2815 /* Check for converting to a pointer to an unconstrained array. */
2816 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
2817 return convert_to_fat_pointer (type, expr);
2818
2819 /* If we're converting between two aggregate types that have the same main
2820 variant, just make a VIEW_CONVERT_EXPR. */
2821 else if (AGGREGATE_TYPE_P (type)
2822 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
2823 return build1 (VIEW_CONVERT_EXPR, type, expr);
2824
2825 /* In all other cases of related types, make a NOP_EXPR. */
2826 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
2827 || (code == INTEGER_CST && ecode == INTEGER_CST
2828 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
2829 return fold (build1 (NOP_EXPR, type, expr));
2830
2831 switch (code)
2832 {
2833 case VOID_TYPE:
2834 return build1 (CONVERT_EXPR, type, expr);
2835
2836 case BOOLEAN_TYPE:
2837 return fold (build1 (NOP_EXPR, type, gnat_truthvalue_conversion (expr)));
2838
2839 case INTEGER_TYPE:
2840 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
2841 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
2842 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
2843 return unchecked_convert (type, expr, false);
2844 else if (TYPE_BIASED_REPRESENTATION_P (type))
2845 return fold (build1 (CONVERT_EXPR, type,
2846 fold (build2 (MINUS_EXPR, TREE_TYPE (type),
2847 convert (TREE_TYPE (type), expr),
2848 TYPE_MIN_VALUE (type)))));
2849
2850 /* ... fall through ... */
2851
2852 case ENUMERAL_TYPE:
2853 return fold (convert_to_integer (type, expr));
2854
2855 case POINTER_TYPE:
2856 case REFERENCE_TYPE:
2857 /* If converting between two pointers to records denoting
2858 both a template and type, adjust if needed to account
2859 for any differing offsets, since one might be negative. */
2860 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
2861 {
2862 tree bit_diff
2863 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
2864 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
2865 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
2866 sbitsize_int (BITS_PER_UNIT));
2867
2868 expr = build1 (NOP_EXPR, type, expr);
2869 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
2870 if (integer_zerop (byte_diff))
2871 return expr;
2872
2873 return build_binary_op (PLUS_EXPR, type, expr,
2874 fold (convert_to_pointer (type, byte_diff)));
2875 }
2876
2877 /* If converting to a thin pointer, handle specially. */
2878 if (TYPE_THIN_POINTER_P (type)
2879 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
2880 return convert_to_thin_pointer (type, expr);
2881
2882 /* If converting fat pointer to normal pointer, get the pointer to the
2883 array and then convert it. */
2884 else if (TYPE_FAT_POINTER_P (etype))
2885 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
2886 NULL_TREE, false);
2887
2888 return fold (convert_to_pointer (type, expr));
2889
2890 case REAL_TYPE:
2891 return fold (convert_to_real (type, expr));
2892
2893 case RECORD_TYPE:
2894 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
2895 return
2896 gnat_build_constructor
2897 (type, tree_cons (TYPE_FIELDS (type),
2898 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2899 NULL_TREE));
2900
2901 /* ... fall through ... */
2902
2903 case ARRAY_TYPE:
2904 /* In these cases, assume the front-end has validated the conversion.
2905 If the conversion is valid, it will be a bit-wise conversion, so
2906 it can be viewed as an unchecked conversion. */
2907 return unchecked_convert (type, expr, false);
2908
2909 case UNION_TYPE:
2910 /* For unchecked unions, just validate that the type is indeed that of
2911 a field of the type. Then make the simple conversion. */
2912 if (TYPE_UNCHECKED_UNION_P (type))
2913 {
2914 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
2915 {
2916 if (TREE_TYPE (tem) == etype)
2917 return build1 (CONVERT_EXPR, type, expr);
2918 else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
2919 && (TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
2920 || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
2921 && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
2922 return build1 (CONVERT_EXPR, type,
2923 convert (TREE_TYPE (tem), expr));
2924 }
2925
2926 gcc_unreachable ();
2927 }
2928 else
2929 /* Otherwise, this is a conversion between a tagged type and some
2930 subtype, which we have to mark as a UNION_TYPE because of
2931 overlapping fields. */
2932 return unchecked_convert (type, expr, false);
2933
2934 case UNCONSTRAINED_ARRAY_TYPE:
2935 /* If EXPR is a constrained array, take its address, convert it to a
2936 fat pointer, and then dereference it. Likewise if EXPR is a
2937 record containing both a template and a constrained array.
2938 Note that a record representing a justified modular type
2939 always represents a packed constrained array. */
2940 if (ecode == ARRAY_TYPE
2941 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
2942 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
2943 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
2944 return
2945 build_unary_op
2946 (INDIRECT_REF, NULL_TREE,
2947 convert_to_fat_pointer (TREE_TYPE (type),
2948 build_unary_op (ADDR_EXPR,
2949 NULL_TREE, expr)));
2950
2951 /* Do something very similar for converting one unconstrained
2952 array to another. */
2953 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
2954 return
2955 build_unary_op (INDIRECT_REF, NULL_TREE,
2956 convert (TREE_TYPE (type),
2957 build_unary_op (ADDR_EXPR,
2958 NULL_TREE, expr)));
2959 else
2960 gcc_unreachable ();
2961
2962 case COMPLEX_TYPE:
2963 return fold (convert_to_complex (type, expr));
2964
2965 default:
2966 gcc_unreachable ();
2967 }
2968 }
2969 \f
2970 /* Remove all conversions that are done in EXP. This includes converting
2971 from a padded type or to a justified modular type. If TRUE_ADDRESS
2972 is true, always return the address of the containing object even if
2973 the address is not bit-aligned. */
2974
2975 tree
2976 remove_conversions (tree exp, bool true_address)
2977 {
2978 switch (TREE_CODE (exp))
2979 {
2980 case CONSTRUCTOR:
2981 if (true_address
2982 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
2983 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
2984 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), true);
2985 break;
2986
2987 case COMPONENT_REF:
2988 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
2989 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2990 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
2991 break;
2992
2993 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
2994 case NOP_EXPR: case CONVERT_EXPR:
2995 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
2996
2997 default:
2998 break;
2999 }
3000
3001 return exp;
3002 }
3003 \f
3004 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3005 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3006 likewise return an expression pointing to the underlying array. */
3007
3008 tree
3009 maybe_unconstrained_array (tree exp)
3010 {
3011 enum tree_code code = TREE_CODE (exp);
3012 tree new;
3013
3014 switch (TREE_CODE (TREE_TYPE (exp)))
3015 {
3016 case UNCONSTRAINED_ARRAY_TYPE:
3017 if (code == UNCONSTRAINED_ARRAY_REF)
3018 {
3019 new
3020 = build_unary_op (INDIRECT_REF, NULL_TREE,
3021 build_component_ref (TREE_OPERAND (exp, 0),
3022 get_identifier ("P_ARRAY"),
3023 NULL_TREE, false));
3024 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3025 return new;
3026 }
3027
3028 else if (code == NULL_EXPR)
3029 return build1 (NULL_EXPR,
3030 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3031 (TREE_TYPE (TREE_TYPE (exp))))),
3032 TREE_OPERAND (exp, 0));
3033
3034 case RECORD_TYPE:
3035 /* If this is a padded type, convert to the unpadded type and see if
3036 it contains a template. */
3037 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
3038 {
3039 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
3040 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3041 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3042 return
3043 build_component_ref (new, NULL_TREE,
3044 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3045 0);
3046 }
3047 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
3048 return
3049 build_component_ref (exp, NULL_TREE,
3050 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
3051 break;
3052
3053 default:
3054 break;
3055 }
3056
3057 return exp;
3058 }
3059 \f
3060 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
3061 If NOTRUNC_P is true, truncation operations should be suppressed. */
3062
3063 tree
3064 unchecked_convert (tree type, tree expr, bool notrunc_p)
3065 {
3066 tree etype = TREE_TYPE (expr);
3067
3068 /* If the expression is already the right type, we are done. */
3069 if (etype == type)
3070 return expr;
3071
3072 /* If both types are integral, just do a normal conversion.
3073 Likewise for a conversion to an unconstrained array. */
3074 if ((((INTEGRAL_TYPE_P (type)
3075 && !(TREE_CODE (type) == INTEGER_TYPE
3076 && TYPE_VAX_FLOATING_POINT_P (type)))
3077 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3078 || (TREE_CODE (type) == RECORD_TYPE
3079 && TYPE_JUSTIFIED_MODULAR_P (type)))
3080 && ((INTEGRAL_TYPE_P (etype)
3081 && !(TREE_CODE (etype) == INTEGER_TYPE
3082 && TYPE_VAX_FLOATING_POINT_P (etype)))
3083 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
3084 || (TREE_CODE (etype) == RECORD_TYPE
3085 && TYPE_JUSTIFIED_MODULAR_P (etype))))
3086 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3087 {
3088 tree rtype = type;
3089
3090 if (TREE_CODE (etype) == INTEGER_TYPE
3091 && TYPE_BIASED_REPRESENTATION_P (etype))
3092 {
3093 tree ntype = copy_type (etype);
3094
3095 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3096 TYPE_MAIN_VARIANT (ntype) = ntype;
3097 expr = build1 (NOP_EXPR, ntype, expr);
3098 }
3099
3100 if (TREE_CODE (type) == INTEGER_TYPE
3101 && TYPE_BIASED_REPRESENTATION_P (type))
3102 {
3103 rtype = copy_type (type);
3104 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3105 TYPE_MAIN_VARIANT (rtype) = rtype;
3106 }
3107
3108 expr = convert (rtype, expr);
3109 if (type != rtype)
3110 expr = build1 (NOP_EXPR, type, expr);
3111 }
3112
3113 /* If we are converting TO an integral type whose precision is not the
3114 same as its size, first unchecked convert to a record that contains
3115 an object of the output type. Then extract the field. */
3116 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3117 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3118 GET_MODE_BITSIZE (TYPE_MODE (type))))
3119 {
3120 tree rec_type = make_node (RECORD_TYPE);
3121 tree field = create_field_decl (get_identifier ("OBJ"), type,
3122 rec_type, 1, 0, 0, 0);
3123
3124 TYPE_FIELDS (rec_type) = field;
3125 layout_type (rec_type);
3126
3127 expr = unchecked_convert (rec_type, expr, notrunc_p);
3128 expr = build_component_ref (expr, NULL_TREE, field, 0);
3129 }
3130
3131 /* Similarly for integral input type whose precision is not equal to its
3132 size. */
3133 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
3134 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3135 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3136 {
3137 tree rec_type = make_node (RECORD_TYPE);
3138 tree field
3139 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3140 1, 0, 0, 0);
3141
3142 TYPE_FIELDS (rec_type) = field;
3143 layout_type (rec_type);
3144
3145 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
3146 expr = unchecked_convert (type, expr, notrunc_p);
3147 }
3148
3149 /* We have a special case when we are converting between two
3150 unconstrained array types. In that case, take the address,
3151 convert the fat pointer types, and dereference. */
3152 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3153 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3154 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3155 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
3156 build_unary_op (ADDR_EXPR, NULL_TREE,
3157 expr)));
3158 else
3159 {
3160 expr = maybe_unconstrained_array (expr);
3161
3162 /* There's no point in doing two unchecked conversions in a row. */
3163 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3164 expr = TREE_OPERAND (expr, 0);
3165
3166 etype = TREE_TYPE (expr);
3167 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
3168 }
3169
3170 /* If the result is an integral type whose size is not equal to
3171 the size of the underlying machine type, sign- or zero-extend
3172 the result. We need not do this in the case where the input is
3173 an integral type of the same precision and signedness or if the output
3174 is a biased type or if both the input and output are unsigned. */
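     /* For instance, with a signed integral type whose RM size is 5 carried
        in a 32-bit machine mode, the shift count below is 32 - 5 = 27;
        shifting left and then arithmetically right by 27 sign-extends bit 4
        into the upper bits of the base type, while a logical shift is used
        for an unsigned base type, zero-extending instead.  */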
3175 if (!notrunc_p
3176 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
3177 && !(TREE_CODE (type) == INTEGER_TYPE
3178 && TYPE_BIASED_REPRESENTATION_P (type))
3179 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3180 GET_MODE_BITSIZE (TYPE_MODE (type)))
3181 && !(INTEGRAL_TYPE_P (etype)
3182 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
3183 && operand_equal_p (TYPE_RM_SIZE (type),
3184 (TYPE_RM_SIZE (etype) != 0
3185 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3186 0))
3187 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
3188 {
3189 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3190 TYPE_UNSIGNED (type));
3191 tree shift_expr
3192 = convert (base_type,
3193 size_binop (MINUS_EXPR,
3194 bitsize_int
3195 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3196 TYPE_RM_SIZE (type)));
3197 expr
3198 = convert (type,
3199 build_binary_op (RSHIFT_EXPR, base_type,
3200 build_binary_op (LSHIFT_EXPR, base_type,
3201 convert (base_type, expr),
3202 shift_expr),
3203 shift_expr));
3204 }
3205
3206 /* An unchecked conversion should never raise Constraint_Error. The code
3207 below assumes that GCC's conversion routines overflow the same way that
3208 the underlying hardware does. This is probably true. In the rare case
3209 when it is false, we can rely on the fact that such conversions are
3210 erroneous anyway. */
3211 if (TREE_CODE (expr) == INTEGER_CST)
3212 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3213
3214 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
3215 mark the result as no longer constant. */
3216 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3217 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
3218 OEP_ONLY_CONST))
3219 TREE_CONSTANT (expr) = 0;
3220
3221 return expr;
3222 }
3223 \f
3224 /* Search the chain of currently reachable declarations for a builtin
3225 FUNCTION_DECL node corresponding to function NAME (an IDENTIFIER_NODE).
3226 Return the first node found, if any, or NULL_TREE otherwise. */
3227
3228 tree
3229 builtin_decl_for (tree name __attribute__ ((unused)))
3230 {
3231 /* ??? not clear yet how to implement this function in tree-ssa, so
3232 return NULL_TREE for now */
3233 return NULL_TREE;
3234 }
3235
3236 #include "gt-ada-utils.h"
3237 #include "gtype-ada.h"