41st Cygnus<->FSF merge
[gcc.git] / gcc / cp / tree.c
1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20
21 #include "config.h"
22 #include <stdio.h>
23 #include "obstack.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "flags.h"
27
28 #define CEIL(x,y) (((x) + (y) - 1) / (y))
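/* A worked example of the rounding this is used for below: with 8-bit
   units, CEIL (17, 8) == 3, so 17 bits round up to 3 whole units.  The
   layout code in this file uses CEIL to round a bit position up to a
   multiple of an alignment or of BITS_PER_UNIT.  */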
29
30 /* Return nonzero if REF is an lvalue valid for this language.
31 Lvalues can be assigned, unless they have TREE_READONLY.
32 Lvalues can have their address taken, unless they have DECL_REGISTER. */
33
34 int
35 lvalue_p (ref)
36 tree ref;
37 {
38 register enum tree_code code = TREE_CODE (ref);
39
40 if (language_lvalue_valid (ref))
41 {
42 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
43 return 1;
44
45 switch (code)
46 {
47 /* Preincrements and predecrements are valid lvalues, provided
48 that what they refer to is itself a valid lvalue. */
49 case PREINCREMENT_EXPR:
50 case PREDECREMENT_EXPR:
51 case COMPONENT_REF:
52 case SAVE_EXPR:
53 return lvalue_p (TREE_OPERAND (ref, 0));
54
55 case STRING_CST:
56 return 1;
57
58 case VAR_DECL:
59 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
60 && DECL_LANG_SPECIFIC (ref)
61 && DECL_IN_AGGR_P (ref))
62 return 0;
63 case INDIRECT_REF:
64 case ARRAY_REF:
65 case PARM_DECL:
66 case RESULT_DECL:
67 case ERROR_MARK:
68 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
69 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
70 return 1;
71 break;
72
73 case TARGET_EXPR:
74 case WITH_CLEANUP_EXPR:
75 return 1;
76
77 /* A currently unresolved scope ref. */
78 case SCOPE_REF:
79 my_friendly_abort (103);
80 case OFFSET_REF:
81 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
82 return 1;
83 return lvalue_p (TREE_OPERAND (ref, 0))
84 && lvalue_p (TREE_OPERAND (ref, 1));
85 break;
86
87 case COND_EXPR:
88 return (lvalue_p (TREE_OPERAND (ref, 1))
89 && lvalue_p (TREE_OPERAND (ref, 2)));
90
91 case MODIFY_EXPR:
92 return 1;
93
94 case COMPOUND_EXPR:
95 return lvalue_p (TREE_OPERAND (ref, 1));
96 }
97 }
98 return 0;
99 }
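/* For concreteness, the cases above classify C++ expressions roughly as
   follows (an illustrative summary, not an exhaustive list): `++i', `s.m',
   `*p', `a[i]', `a = b', `(a, b)' when `b' is an lvalue, and `f ? a : b'
   when both arms are lvalues all yield nonzero; string literals are
   lvalues; expressions of function or method type are not.  */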
100
101 /* Return nonzero if REF is an lvalue valid for this language;
102 otherwise, print an error message and return zero. */
103
104 int
105 lvalue_or_else (ref, string)
106 tree ref;
107 char *string;
108 {
109 int win = lvalue_p (ref);
110 if (! win)
111 error ("non-lvalue in %s", string);
112 return win;
113 }
114
115 /* INIT is a CALL_EXPR which needs info about its target.
116 TYPE is the type that this initialization should appear to have.
117
118 Build an encapsulation of the initialization to perform
119 and return it so that it can be processed by language-independent
120 and language-specific expression expanders.
121
122 If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
123 Otherwise, cleanups are not built here. For example, when building
124 an initialization for a stack slot whose cleanup the called function
125 handles, we would not want to build one here. */
126 tree
127 build_cplus_new (type, init, with_cleanup_p)
128 tree type;
129 tree init;
130 int with_cleanup_p;
131 {
132 tree slot = build (VAR_DECL, type);
133 tree rval = build (NEW_EXPR, type,
134 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
135 TREE_SIDE_EFFECTS (rval) = 1;
136 TREE_ADDRESSABLE (rval) = 1;
137 rval = build (TARGET_EXPR, type, slot, rval, 0);
138 TREE_SIDE_EFFECTS (rval) = 1;
139 TREE_ADDRESSABLE (rval) = 1;
140
141 if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
142 {
143 TREE_OPERAND (rval, 2) = error_mark_node;
144 rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
145 build_delete (TYPE_POINTER_TO (type),
146 build_unary_op (ADDR_EXPR, slot, 0),
147 integer_two_node,
148 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0));
149 TREE_SIDE_EFFECTS (rval) = 1;
150 TREE_ADDRESSABLE (rval) = 1;
151 }
152 return rval;
153 }
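/* The shape of the tree built above, when WITH_CLEANUP_P is set and TYPE
   needs a destructor, is (informally):

	WITH_CLEANUP_EXPR
	  TARGET_EXPR (slot, NEW_EXPR (fn, args, slot), error_mark_node)
	  0
	  <destructor call on &slot>

   Without the cleanup, the result is just the TARGET_EXPR.  */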
154
155 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
156 these CALL_EXPRs with tree nodes that will perform the cleanups. */
157
158 tree
159 break_out_cleanups (exp)
160 tree exp;
161 {
162 tree tmp = exp;
163
164 if (TREE_CODE (tmp) == CALL_EXPR
165 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
166 return build_cplus_new (TREE_TYPE (tmp), tmp, 1);
167
168 while (TREE_CODE (tmp) == NOP_EXPR
169 || TREE_CODE (tmp) == CONVERT_EXPR
170 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
171 {
172 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
173 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
174 {
175 TREE_OPERAND (tmp, 0)
176 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
177 TREE_OPERAND (tmp, 0), 1);
178 break;
179 }
180 else
181 tmp = TREE_OPERAND (tmp, 0);
182 }
183 return exp;
184 }
185
186 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
187 copies where they are found. Returns a deep copy of all nodes transitively
188 containing CALL_EXPRs. */
189
190 tree
191 break_out_calls (exp)
192 tree exp;
193 {
194 register tree t1, t2;
195 register enum tree_code code;
196 register int changed = 0;
197 register int i;
198
199 if (exp == NULL_TREE)
200 return exp;
201
202 code = TREE_CODE (exp);
203
204 if (code == CALL_EXPR)
205 return copy_node (exp);
206
207 /* Don't try to defeat a SAVE_EXPR, as it should be evaluated only once. */
208 if (code == SAVE_EXPR)
209 return exp;
210
211 switch (TREE_CODE_CLASS (code))
212 {
213 default:
214 abort ();
215
216 case 'c': /* a constant */
217 case 't': /* a type node */
218 case 'x': /* something random, like an identifier or an ERROR_MARK. */
219 return exp;
220
221 case 'd': /* A decl node */
222 t1 = break_out_calls (DECL_INITIAL (exp));
223 if (t1 != DECL_INITIAL (exp))
224 {
225 exp = copy_node (exp);
226 DECL_INITIAL (exp) = t1;
227 }
228 return exp;
229
230 case 'b': /* A block node */
231 {
232 /* Don't know how to handle these correctly yet. Must do a
233 break_out_calls on all DECL_INITIAL values for local variables,
234 and also break_out_calls on all sub-blocks and sub-statements. */
235 abort ();
236 }
237 return exp;
238
239 case 'e': /* an expression */
240 case 'r': /* a reference */
241 case 's': /* an expression with side effects */
242 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
243 {
244 t1 = break_out_calls (TREE_OPERAND (exp, i));
245 if (t1 != TREE_OPERAND (exp, i))
246 {
247 exp = copy_node (exp);
248 TREE_OPERAND (exp, i) = t1;
249 }
250 }
251 return exp;
252
253 case '<': /* a comparison expression */
254 case '2': /* a binary arithmetic expression */
255 t2 = break_out_calls (TREE_OPERAND (exp, 1));
256 if (t2 != TREE_OPERAND (exp, 1))
257 changed = 1;
258 case '1': /* a unary arithmetic expression */
259 t1 = break_out_calls (TREE_OPERAND (exp, 0));
260 if (t1 != TREE_OPERAND (exp, 0))
261 changed = 1;
262 if (changed)
263 {
264 if (tree_code_length[(int) code] == 1)
265 return build1 (code, TREE_TYPE (exp), t1);
266 else
267 return build (code, TREE_TYPE (exp), t1, t2);
268 }
269 return exp;
270 }
271
272 }
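/* The pattern used by break_out_calls above -- copy a node only when one
   of its operands changed, so unchanged subtrees stay shared -- reduced to
   a minimal standalone sketch.  The toy `node' type and its codes are an
   illustration only, not part of the compiler.  */
#if 0
#include <stdlib.h>

struct node
{
  int code;			/* 0 = leaf, 1 = interior, 2 = "call" */
  struct node *op[2];
};

/* Return EXP with every "call" node copied; copy an interior node only
   if one of its operands was copied.  */
static struct node *
rewrite (struct node *exp)
{
  struct node *copy;
  int i, changed = 0;

  if (exp == 0 || exp->code == 0)
    return exp;
  if (exp->code == 2)
    {
      copy = (struct node *) malloc (sizeof *copy);
      *copy = *exp;
      return copy;
    }
  copy = exp;
  for (i = 0; i < 2; i++)
    {
      struct node *sub = rewrite (exp->op[i]);
      if (sub != exp->op[i] && ! changed)
	{
	  copy = (struct node *) malloc (sizeof *copy);
	  *copy = *exp;
	  changed = 1;
	}
      copy->op[i] = sub;
    }
  return copy;
}
#endif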
273 \f
274 extern struct obstack *current_obstack;
275 extern struct obstack permanent_obstack, class_obstack;
276 extern struct obstack *saveable_obstack;
277
278 /* Here is how primitive or already-canonicalized types' hash
279 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
280 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
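/* That is, the hash code of a node is simply the low 18 bits
   (0777777 == 0x3ffff) of its address. */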
281
282 /* Construct, lay out, and return the type of methods belonging to class
283 BASETYPE, whose arguments are described by ARGTYPES and whose return
284 values are described by RETTYPE. If such a type exists already, reuse it. */
285 tree
286 build_cplus_method_type (basetype, rettype, argtypes)
287 tree basetype, rettype, argtypes;
288 {
289 register tree t;
290 tree ptype;
291 int hashcode;
292
293 /* Make a node of the sort we want. */
294 t = make_node (METHOD_TYPE);
295
296 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
297 TREE_TYPE (t) = rettype;
298 if (IS_SIGNATURE (basetype))
299 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
300 TYPE_READONLY (basetype),
301 TYPE_VOLATILE (basetype));
302 else
303 {
304 ptype = build_pointer_type (basetype);
305 ptype = build_type_variant (ptype, 1, 0);
306 }
307 /* The actual arglist for this function includes a "hidden" argument
308 which is "this". Put it into the list of argument types. */
309
310 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
311 TYPE_ARG_TYPES (t) = argtypes;
312 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
313
314 /* If we already have such a type, use the old one and free this one.
315 Note that it also frees up the above cons cell if found. */
316 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
317 t = type_hash_canon (hashcode, t);
318
319 if (TYPE_SIZE (t) == 0)
320 layout_type (t);
321
322 return t;
323 }
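/* For example, for a method `int C::f (double)' the argument list built
   above is roughly (<readonly variant of `C *'>, double), with the hidden
   `this' entry marked artificial via TREE_SIDE_EFFECTS on its cons cell. */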
324
325 tree
326 build_cplus_staticfn_type (basetype, rettype, argtypes)
327 tree basetype, rettype, argtypes;
328 {
329 register tree t;
330 int hashcode;
331
332 /* Make a node of the sort we want. */
333 t = make_node (FUNCTION_TYPE);
334
335 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
336 TREE_TYPE (t) = rettype;
337
338 /* Unlike a METHOD_TYPE, a static member function gets no hidden
339 `this' argument; the argument types are used as given. */
340
341 TYPE_ARG_TYPES (t) = argtypes;
342
343 /* If we already have such a type, use the old one and free this one.
344 No cons cell was built above here, so there is nothing extra to free. */
345 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
346 t = type_hash_canon (hashcode, t);
347
348 if (TYPE_SIZE (t) == 0)
349 layout_type (t);
350
351 return t;
352 }
353
354 tree
355 build_cplus_array_type (elt_type, index_type)
356 tree elt_type;
357 tree index_type;
358 {
359 register struct obstack *ambient_obstack = current_obstack;
360 register struct obstack *ambient_saveable_obstack = saveable_obstack;
361 tree t;
362
363 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
364 make this permanent too. */
365 if (TREE_PERMANENT (elt_type)
366 && (index_type == 0 || TREE_PERMANENT (index_type)))
367 {
368 current_obstack = &permanent_obstack;
369 saveable_obstack = &permanent_obstack;
370 }
371
372 t = build_array_type (elt_type, index_type);
373
374 /* Propagate the element type's need for construction/destruction up to
375 the array type, so that array initialization and cleanup are handled. */
376 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
377 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
378 current_obstack = ambient_obstack;
379 saveable_obstack = ambient_saveable_obstack;
380 return t;
381 }
382 \f
383 /* Add OFFSET to all base types of BINFO.
384
385 OFFSET, which is a type offset, is a number of bytes.
386
387 Note that we don't have to worry about having two paths to the
388 same base type, since this type owns its association list. */
389 void
390 propagate_binfo_offsets (binfo, offset)
391 tree binfo;
392 tree offset;
393 {
394 tree binfos = BINFO_BASETYPES (binfo);
395 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
396
397 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
398 {
399 tree base_binfo = TREE_VEC_ELT (binfos, i);
400
401 if (TREE_VIA_VIRTUAL (base_binfo))
402 i += 1;
403 else
404 {
405 int j;
406 tree base_binfos = BINFO_BASETYPES (base_binfo);
407 tree delta;
408
409 for (j = i+1; j < n_baselinks; j++)
410 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
411 {
412 /* The next basetype offset must take into account the space
413 between the classes, not just the size of each class. */
414 delta = size_binop (MINUS_EXPR,
415 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
416 BINFO_OFFSET (base_binfo));
417 break;
418 }
419
420 #if 0
421 if (BINFO_OFFSET_ZEROP (base_binfo))
422 BINFO_OFFSET (base_binfo) = offset;
423 else
424 BINFO_OFFSET (base_binfo)
425 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
426 #else
427 BINFO_OFFSET (base_binfo) = offset;
428 #endif
429 if (base_binfos)
430 {
431 int k;
432 tree chain = NULL_TREE;
433
434 /* Now unshare the structure beneath BASE_BINFO. */
435 for (k = TREE_VEC_LENGTH (base_binfos)-1;
436 k >= 0; k--)
437 {
438 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
439 if (! TREE_VIA_VIRTUAL (base_base_binfo))
440 TREE_VEC_ELT (base_binfos, k)
441 = make_binfo (BINFO_OFFSET (base_base_binfo),
442 base_base_binfo,
443 BINFO_VTABLE (base_base_binfo),
444 BINFO_VIRTUALS (base_base_binfo),
445 chain);
446 chain = TREE_VEC_ELT (base_binfos, k);
447 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
448 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
449 }
450 /* Now propagate the offset to the base types. */
451 propagate_binfo_offsets (base_binfo, offset);
452 }
453
454 /* Go to our next class that counts for offset propagation. */
455 i = j;
456 if (i < n_baselinks)
457 offset = size_binop (PLUS_EXPR, offset, delta);
458 }
459 }
460 }
461
462 /* Compute the actual offsets that our virtual base classes
463 will have *for this type*. This must be performed after
464 the fields are laid out, since virtual baseclasses must be
465 laid out at the end of the record.
466
467 Returns the maximum number of virtual functions any of the virtual
468 baseclasses provide. */
469 int
470 layout_vbasetypes (rec, max)
471 tree rec;
472 int max;
473 {
474 /* Get all the virtual base types that this type uses.
475 The TREE_VALUE slot holds the virtual baseclass type. */
476 tree vbase_types = get_vbase_types (rec);
477
478 #ifdef STRUCTURE_SIZE_BOUNDARY
479 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
480 #else
481 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
482 #endif
483 int desired_align;
484
485 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
486 where CONST_SIZE is an integer
487 and VAR_SIZE is a tree expression.
488 If VAR_SIZE is null, the size is just CONST_SIZE.
489 Naturally we try to avoid using VAR_SIZE. */
490 register unsigned const_size = 0;
491 register tree var_size = 0;
492 int nonvirtual_const_size;
493 tree nonvirtual_var_size;
494
495 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
496
497 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
498 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
499 else
500 var_size = TYPE_SIZE (rec);
501
502 nonvirtual_const_size = const_size;
503 nonvirtual_var_size = var_size;
504
505 while (vbase_types)
506 {
507 tree basetype = BINFO_TYPE (vbase_types);
508 tree offset;
509
510 desired_align = TYPE_ALIGN (basetype);
511 record_align = MAX (record_align, desired_align);
512
513 if (const_size == 0)
514 offset = integer_zero_node;
515 else
516 {
517 /* Give each virtual base type the alignment it wants. */
518 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
519 * TYPE_ALIGN (basetype);
520 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
521 }
522
523 if (CLASSTYPE_VSIZE (basetype) > max)
524 max = CLASSTYPE_VSIZE (basetype);
525 BINFO_OFFSET (vbase_types) = offset;
526
527 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
528 const_size += MAX (BITS_PER_UNIT,
529 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
530 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
531 else if (var_size == 0)
532 var_size = TYPE_SIZE (basetype);
533 else
534 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
535
536 vbase_types = TREE_CHAIN (vbase_types);
537 }
538
539 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
540 here, as that is for this class, without any virtual base classes. */
541 TYPE_ALIGN (rec) = record_align;
542 if (const_size != nonvirtual_const_size)
543 {
544 CLASSTYPE_VBASE_SIZE (rec)
545 = size_int (const_size - nonvirtual_const_size);
546 TYPE_SIZE (rec) = size_int (const_size);
547 }
548
549 /* Now propagate offset information throughout the lattice
550 under the vbase type. */
551 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
552 vbase_types = TREE_CHAIN (vbase_types))
553 {
554 tree base_binfos = BINFO_BASETYPES (vbase_types);
555
556 if (base_binfos)
557 {
558 tree chain = NULL_TREE;
559 int j;
560 /* Now unshare the structure beneath BASE_BINFO. */
561
562 for (j = TREE_VEC_LENGTH (base_binfos)-1;
563 j >= 0; j--)
564 {
565 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
566 if (! TREE_VIA_VIRTUAL (base_base_binfo))
567 TREE_VEC_ELT (base_binfos, j)
568 = make_binfo (BINFO_OFFSET (base_base_binfo),
569 base_base_binfo,
570 BINFO_VTABLE (base_base_binfo),
571 BINFO_VIRTUALS (base_base_binfo),
572 chain);
573 chain = TREE_VEC_ELT (base_binfos, j);
574 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
575 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
576 }
577
578 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
579 }
580 }
581
582 return max;
583 }
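/* A worked example of the size bookkeeping above, with illustrative
   numbers: if the non-virtual part of REC occupies 96 bits and the first
   virtual base wants 32-bit alignment, const_size is already a multiple
   of 32, so that base gets BINFO_OFFSET 12 (bytes, i.e. CEIL (96, 8) with
   8-bit units); const_size then grows by the base's size less its own
   virtual-base part, but by at least BITS_PER_UNIT. */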
584
585 /* Lay out the base types of a record type, REC.
586 Tentatively set the size and alignment of REC
587 according to the base types alone.
588
589 Offsets for immediate nonvirtual baseclasses are also computed here.
590
591 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
592 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
593
594 Returns list of virtual base classes in a FIELD_DECL chain. */
595 tree
596 layout_basetypes (rec, binfos)
597 tree rec, binfos;
598 {
599 /* Chain to hold all the new FIELD_DECLs which point at virtual
600 base classes. */
601 tree vbase_decls = NULL_TREE;
602
603 #ifdef STRUCTURE_SIZE_BOUNDARY
604 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
605 #else
606 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
607 #endif
608
609 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
610 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
611 the size is just CONST_SIZE. Naturally we try to avoid using
612 VAR_SIZE. And so far, we've been successful. */
613 #if 0
614 register tree var_size = 0;
615 #endif
616
617 register unsigned const_size = 0;
618 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
619
620 /* Handle basetypes almost like fields, but record their
621 offsets differently. */
622
623 for (i = 0; i < n_baseclasses; i++)
624 {
625 int inc, desired_align, int_vbase_size;
626 register tree base_binfo = TREE_VEC_ELT (binfos, i);
627 register tree basetype = BINFO_TYPE (base_binfo);
628 tree decl, offset;
629
630 if (TYPE_SIZE (basetype) == 0)
631 {
632 #if 0
633 /* This error is now reported in xref_tag, thus giving better
634 location information. */
635 error_with_aggr_type (base_binfo,
636 "base class `%s' has incomplete type");
637
638 TREE_VIA_PUBLIC (base_binfo) = 1;
639 TREE_VIA_PROTECTED (base_binfo) = 0;
640 TREE_VIA_VIRTUAL (base_binfo) = 0;
641
642 /* Should handle this better so that
643
644 class A;
645 class B: private A { virtual void F(); };
646
647 does not dump core when compiled. */
648 my_friendly_abort (121);
649 #endif
650 continue;
651 }
652
653 /* All basetypes are recorded in the association list of the
654 derived type. */
655
656 if (TREE_VIA_VIRTUAL (base_binfo))
657 {
658 int j;
659 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
660 + sizeof (VBASE_NAME) + 1);
661
662 /* The offset for a virtual base class is only used in computing
663 virtual function tables and for initializing virtual base
664 pointers. It is built once `get_vbase_types' is called. */
665
666 /* If this basetype can come from another vbase pointer
667 without an additional indirection, we will share
668 that pointer. If an indirection is involved, we
669 make our own pointer. */
670 for (j = 0; j < n_baseclasses; j++)
671 {
672 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
673 if (! TREE_VIA_VIRTUAL (other_base_binfo)
674 && binfo_member (basetype,
675 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
676 goto got_it;
677 }
678 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
679 decl = build_lang_decl (FIELD_DECL, get_identifier (name),
680 build_pointer_type (basetype));
681 /* If you change any of the below, take a look at all the
682 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
683 them too. */
684 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
685 DECL_VIRTUAL_P (decl) = 1;
686 DECL_FIELD_CONTEXT (decl) = rec;
687 DECL_CLASS_CONTEXT (decl) = rec;
688 DECL_FCONTEXT (decl) = basetype;
689 DECL_FIELD_SIZE (decl) = 0;
690 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
691 TREE_CHAIN (decl) = vbase_decls;
692 BINFO_VPTR_FIELD (base_binfo) = decl;
693 vbase_decls = decl;
694
695 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
696 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
697 {
698 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
699 "destructor `%s' non-virtual");
700 warning ("in inheritance relationship `%s: virtual %s'",
701 TYPE_NAME_STRING (rec),
702 TYPE_NAME_STRING (basetype));
703 }
704 got_it:
705 /* The space this decl occupies has already been accounted for. */
706 continue;
707 }
708
709 if (const_size == 0)
710 offset = integer_zero_node;
711 else
712 {
713 /* Give each base type the alignment it wants. */
714 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
715 * TYPE_ALIGN (basetype);
716 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
717
718 #if 0
719 /* bpk: Disabled this check until someone is willing to
720 claim it as theirs and explain exactly what circumstances
721 warrant the warning. */
722 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
723 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
724 {
725 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
726 "destructor `%s' non-virtual");
727 warning ("in inheritance relationship `%s:%s %s'",
728 TYPE_NAME_STRING (rec),
729 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
730 TYPE_NAME_STRING (basetype));
731 }
732 #endif
733 }
734 BINFO_OFFSET (base_binfo) = offset;
735 if (CLASSTYPE_VSIZE (basetype))
736 {
737 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
738 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
739 }
740 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
741 TYPE_BINFO (rec) = base_binfo;
742
743 /* Add only the amount of storage not present in
744 the virtual baseclasses. */
745
746 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
747 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
748 {
749 inc = MAX (record_align,
750 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
751 - int_vbase_size));
752
753 /* Record must have at least as much alignment as any field. */
754 desired_align = TYPE_ALIGN (basetype);
755 record_align = MAX (record_align, desired_align);
756
757 const_size += inc;
758 }
759 }
760
761 if (const_size)
762 CLASSTYPE_SIZE (rec) = size_int (const_size);
763 else
764 CLASSTYPE_SIZE (rec) = integer_zero_node;
765 CLASSTYPE_ALIGN (rec) = record_align;
766
767 return vbase_decls;
768 }
769 \f
770 /* Hashing of lists so that we don't make duplicates.
771 The entry point is `list_hash_canon'. */
772
773 /* Each hash table slot is a bucket containing a chain
774 of these structures. */
775
776 struct list_hash
777 {
778 struct list_hash *next; /* Next structure in the bucket. */
779 int hashcode; /* Hash code of this list. */
780 tree list; /* The list recorded here. */
781 };
782
783 /* Now here is the hash table. When recording a list, it is added
784 to the slot whose index is the hash code mod the table size.
785 Note that the hash table is used for several kinds of lists.
786 While all these live in the same table, they are completely independent,
787 and the hash code is computed differently for each of these. */
788
789 #define TYPE_HASH_SIZE 59
790 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
791
792 /* Compute a hash code for a list (chain of TREE_LIST nodes
793 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
794 TREE_COMMON slots), by adding the hash codes of the individual entries. */
795
796 int
797 list_hash (list)
798 tree list;
799 {
800 register int hashcode = 0;
801
802 if (TREE_CHAIN (list))
803 hashcode += TYPE_HASH (TREE_CHAIN (list));
804
805 if (TREE_VALUE (list))
806 hashcode += TYPE_HASH (TREE_VALUE (list));
807 else
808 hashcode += 1007;
809 if (TREE_PURPOSE (list))
810 hashcode += TYPE_HASH (TREE_PURPOSE (list));
811 else
812 hashcode += 1009;
813 return hashcode;
814 }
815
816 /* Look in the list hash table for a list isomorphic to LIST, with
817 hash code HASHCODE. If one is found, return it. Otherwise return 0. */
818
819 tree
820 list_hash_lookup (hashcode, list)
821 int hashcode;
822 tree list;
823 {
824 register struct list_hash *h;
825 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
826 if (h->hashcode == hashcode
827 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
828 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
829 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
830 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
831 && TREE_VALUE (h->list) == TREE_VALUE (list)
832 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
833 {
834 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
835 return h->list;
836 }
837 return 0;
838 }
839
840 /* Add an entry to the list-hash-table
841 for a list LIST whose hash code is HASHCODE. */
842
843 void
844 list_hash_add (hashcode, list)
845 int hashcode;
846 tree list;
847 {
848 register struct list_hash *h;
849
850 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
851 h->hashcode = hashcode;
852 h->list = list;
853 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
854 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
855 }
856
857 /* Given LIST, and HASHCODE its hash code, return the canonical
858 object for an identical list if one already exists.
859 Otherwise, return LIST, and record it as the canonical object
860 if it is a permanent object.
861
862 To use this function, first create a list of the sort you want.
863 Then compute its hash code from the fields of the list that
864 make it different from other similar lists.
865 Then call this function and use the value.
866 This function frees the list you pass in if it is a duplicate. */
867
868 /* Set to 1 to debug without canonicalization. Never set by program. */
869 static int debug_no_list_hash = 0;
870
871 tree
872 list_hash_canon (hashcode, list)
873 int hashcode;
874 tree list;
875 {
876 tree t1;
877
878 if (debug_no_list_hash)
879 return list;
880
881 t1 = list_hash_lookup (hashcode, list);
882 if (t1 != 0)
883 {
884 obstack_free (&class_obstack, list);
885 return t1;
886 }
887
888 /* If this is a new list, record it for later reuse. */
889 list_hash_add (hashcode, list);
890
891 return list;
892 }
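/* The canonicalization scheme above, reduced to a standalone sketch: a
   fixed-size table of buckets, each a chain of entries holding a hash code
   and a payload; lookup walks one bucket; "canon" reuses an existing entry
   or records a new one.  The toy `pair' payload is an illustration only.  */
#if 0
#include <stdlib.h>

#define TABLE_SIZE 59

struct pair { int first, second; };

struct entry
{
  struct entry *next;		/* Next entry in the same bucket.  */
  int hashcode;			/* Hash code of this payload.  */
  struct pair value;		/* The payload recorded here.  */
};

static struct entry *table[TABLE_SIZE];

static int
pair_hash (struct pair p)
{
  unsigned int h = (unsigned int) p.first * 1009u + (unsigned int) p.second;
  return (int) (h & 0x7fffffff);
}

/* Return the canonical copy of P, recording P if no equal pair is known.  */
static struct pair *
pair_canon (struct pair p)
{
  int hashcode = pair_hash (p);
  struct entry *h;

  for (h = table[hashcode % TABLE_SIZE]; h; h = h->next)
    if (h->hashcode == hashcode
	&& h->value.first == p.first
	&& h->value.second == p.second)
      return &h->value;

  h = (struct entry *) malloc (sizeof *h);
  h->hashcode = hashcode;
  h->value = p;
  h->next = table[hashcode % TABLE_SIZE];
  table[hashcode % TABLE_SIZE] = h;
  return &h->value;
}
#endif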
893
894 tree
895 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
896 int via_public, via_virtual, via_protected;
897 tree purpose, value, chain;
898 {
899 struct obstack *ambient_obstack = current_obstack;
900 tree t;
901 int hashcode;
902
903 current_obstack = &class_obstack;
904 t = tree_cons (purpose, value, chain);
905 TREE_VIA_PUBLIC (t) = via_public;
906 TREE_VIA_PROTECTED (t) = via_protected;
907 TREE_VIA_VIRTUAL (t) = via_virtual;
908 hashcode = list_hash (t);
909 t = list_hash_canon (hashcode, t);
910 current_obstack = ambient_obstack;
911 return t;
912 }
913
914 /* Constructor for hashed lists. */
915 tree
916 hash_tree_chain (value, chain)
917 tree value, chain;
918 {
919 struct obstack *ambient_obstack = current_obstack;
920 tree t;
921 int hashcode;
922
923 current_obstack = &class_obstack;
924 t = tree_cons (NULL_TREE, value, chain);
925 hashcode = list_hash (t);
926 t = list_hash_canon (hashcode, t);
927 current_obstack = ambient_obstack;
928 return t;
929 }
930
931 /* Similar, but used for concatenating two lists. */
932 tree
933 hash_chainon (list1, list2)
934 tree list1, list2;
935 {
936 if (list2 == 0)
937 return list1;
938 if (list1 == 0)
939 return list2;
940 if (TREE_CHAIN (list1) == NULL_TREE)
941 return hash_tree_chain (TREE_VALUE (list1), list2);
942 return hash_tree_chain (TREE_VALUE (list1),
943 hash_chainon (TREE_CHAIN (list1), list2));
944 }
945
946 static tree
947 get_identifier_list (value)
948 tree value;
949 {
950 tree list = IDENTIFIER_AS_LIST (value);
951 if (list != NULL_TREE
952 && (TREE_CODE (list) != TREE_LIST
953 || TREE_VALUE (list) != value))
954 list = NULL_TREE;
955 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
956 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
957 && IDENTIFIER_TYPE_VALUE (value)
958 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
959 {
960 tree type = IDENTIFIER_TYPE_VALUE (value);
961
962 if (TYPE_PTRMEMFUNC_P (type))
963 list = NULL_TREE;
964 else if (type == current_class_type)
965 /* Don't mess up the constructor name. */
966 list = tree_cons (NULL_TREE, value, NULL_TREE);
967 else
968 {
969 register tree id;
970 /* This will return the correct thing for regular types,
971 nested types, and templates. Yay! */
972 if (TYPE_NESTED_NAME (type))
973 id = TYPE_NESTED_NAME (type);
974 else
975 id = TYPE_IDENTIFIER (type);
976
977 if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
978 CLASSTYPE_ID_AS_LIST (type)
979 = perm_tree_cons (NULL_TREE, id, NULL_TREE);
980 list = CLASSTYPE_ID_AS_LIST (type);
981 }
982 }
983 return list;
984 }
985
986 tree
987 get_decl_list (value)
988 tree value;
989 {
990 tree list = NULL_TREE;
991
992 if (TREE_CODE (value) == IDENTIFIER_NODE)
993 list = get_identifier_list (value);
994 else if (TREE_CODE (value) == RECORD_TYPE
995 && TYPE_LANG_SPECIFIC (value))
996 list = CLASSTYPE_AS_LIST (value);
997
998 if (list != NULL_TREE)
999 {
1000 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1001 return list;
1002 }
1003
1004 return build_decl_list (NULL_TREE, value);
1005 }
1006
1007 /* Look in the list hash table for a list isomorphic to
1008 `build_tree_list (NULL_TREE, VALUE)'.
1009 If one is found, return it. Otherwise build, record, and return one. */
1010
1011 tree
1012 list_hash_lookup_or_cons (value)
1013 tree value;
1014 {
1015 register int hashcode = TYPE_HASH (value);
1016 register struct list_hash *h;
1017 struct obstack *ambient_obstack;
1018 tree list = NULL_TREE;
1019
1020 if (TREE_CODE (value) == IDENTIFIER_NODE)
1021 list = get_identifier_list (value);
1022 else if (TREE_CODE (value) == TYPE_DECL
1023 && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
1024 && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
1025 list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
1026 else if (TREE_CODE (value) == RECORD_TYPE
1027 && TYPE_LANG_SPECIFIC (value))
1028 list = CLASSTYPE_AS_LIST (value);
1029
1030 if (list != NULL_TREE)
1031 {
1032 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
1033 return list;
1034 }
1035
1036 if (debug_no_list_hash)
1037 return hash_tree_chain (value, NULL_TREE);
1038
1039 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1040 if (h->hashcode == hashcode
1041 && TREE_VIA_VIRTUAL (h->list) == 0
1042 && TREE_VIA_PUBLIC (h->list) == 0
1043 && TREE_VIA_PROTECTED (h->list) == 0
1044 && TREE_PURPOSE (h->list) == 0
1045 && TREE_VALUE (h->list) == value)
1046 {
1047 my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
1048 my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
1049 return h->list;
1050 }
1051
1052 ambient_obstack = current_obstack;
1053 current_obstack = &class_obstack;
1054 list = build_tree_list (NULL_TREE, value);
1055 list_hash_add (hashcode, list);
1056 current_obstack = ambient_obstack;
1057 return list;
1058 }
1059 \f
1060 /* Build an association between TYPE and some parameters:
1061
1062 OFFSET is the offset added to `this' to convert it to a pointer
1063 of type `TYPE *'
1064
1065 BINFO is the base binfo to use, if we are deriving from one. This
1066 is necessary, as we want specialized parent binfos from base
1067 classes, so that the VTABLE_NAMEs of bases are for the most derived
1068 type, instead of the simple type.
1069
1070 VTABLE is the virtual function table with which to initialize
1071 sub-objects of type TYPE.
1072
1073 VIRTUALS are the virtual functions sitting in VTABLE.
1074
1075 CHAIN is the list of further associations we must retain. */
1076
1077 tree
1078 make_binfo (offset, binfo, vtable, virtuals, chain)
1079 tree offset, binfo;
1080 tree vtable, virtuals;
1081 tree chain;
1082 {
1083 tree new_binfo = make_tree_vec (6);
1084 tree type;
1085
1086 if (TREE_CODE (binfo) == TREE_VEC)
1087 type = BINFO_TYPE (binfo);
1088 else
1089 {
1090 type = binfo;
1091 binfo = TYPE_BINFO (binfo);
1092 }
1093
1094 TREE_CHAIN (new_binfo) = chain;
1095 if (chain)
1096 TREE_USED (new_binfo) = TREE_USED (chain);
1097
1098 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1099 BINFO_OFFSET (new_binfo) = offset;
1100 BINFO_VTABLE (new_binfo) = vtable;
1101 BINFO_VIRTUALS (new_binfo) = virtuals;
1102 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1103
1104 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1105 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1106 return new_binfo;
1107 }
1108
1109 tree
1110 copy_binfo (list)
1111 tree list;
1112 {
1113 tree binfo = copy_list (list);
1114 tree rval = binfo;
1115 while (binfo)
1116 {
1117 TREE_USED (binfo) = 0;
1118 if (BINFO_BASETYPES (binfo))
1119 BINFO_BASETYPES (binfo) = copy_node (BINFO_BASETYPES (binfo));
1120 binfo = TREE_CHAIN (binfo);
1121 }
1122 return rval;
1123 }
1124
1125 /* Return the binfo value for ELEM in TYPE. */
1126
1127 tree
1128 binfo_value (elem, type)
1129 tree elem;
1130 tree type;
1131 {
1132 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1133 compiler_error ("base class `%s' ambiguous in binfo_value",
1134 TYPE_NAME_STRING (elem));
1135 if (elem == type)
1136 return TYPE_BINFO (type);
1137 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1138 return type;
1139 return get_binfo (elem, type, 0);
1140 }
1141
1142 tree
1143 reverse_path (path)
1144 tree path;
1145 {
1146 register tree prev = 0, tmp, next;
1147 for (tmp = path; tmp; tmp = next)
1148 {
1149 next = BINFO_INHERITANCE_CHAIN (tmp);
1150 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1151 prev = tmp;
1152 }
1153 return prev;
1154 }
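/* reverse_path above is the standard in-place reversal of a singly linked
   chain, here linked through BINFO_INHERITANCE_CHAIN.  The same idea with
   an ordinary `next' pointer (toy type, illustration only):  */
#if 0
struct link { struct link *next; int datum; };

static struct link *
reverse (struct link *path)
{
  struct link *prev = 0, *tmp, *next;

  for (tmp = path; tmp; tmp = next)
    {
      next = tmp->next;
      tmp->next = prev;
      prev = tmp;
    }
  return prev;
}
#endif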
1155
1156 tree
1157 virtual_member (elem, list)
1158 tree elem;
1159 tree list;
1160 {
1161 tree t;
1162 tree rval, nval;
1163
1164 for (t = list; t; t = TREE_CHAIN (t))
1165 if (elem == BINFO_TYPE (t))
1166 return t;
1167 rval = 0;
1168 for (t = list; t; t = TREE_CHAIN (t))
1169 {
1170 tree binfos = BINFO_BASETYPES (t);
1171 int i;
1172
1173 if (binfos != NULL_TREE)
1174 for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
1175 {
1176 nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
1177 if (nval)
1178 {
1179 if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
1180 my_friendly_abort (104);
1181 rval = nval;
1182 }
1183 }
1184 }
1185 return rval;
1186 }
1187
1188 /* Return the offset (as an INTEGER_CST) for ELEM in LIST.
1189 INITIAL_OFFSET is the value to add to the offset that ELEM's
1190 binfo entry in LIST provides.
1191
1192 Returns NULL if ELEM does not have a binfo value in LIST. */
1193
1194 tree
1195 virtual_offset (elem, list, initial_offset)
1196 tree elem;
1197 tree list;
1198 tree initial_offset;
1199 {
1200 tree vb, offset;
1201 tree rval, nval;
1202
1203 for (vb = list; vb; vb = TREE_CHAIN (vb))
1204 if (elem == BINFO_TYPE (vb))
1205 return size_binop (PLUS_EXPR, initial_offset, BINFO_OFFSET (vb));
1206 rval = 0;
1207 for (vb = list; vb; vb = TREE_CHAIN (vb))
1208 {
1209 tree binfos = BINFO_BASETYPES (vb);
1210 int i;
1211
1212 if (binfos == NULL_TREE)
1213 continue;
1214
1215 for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
1216 {
1217 nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
1218 if (nval)
1219 {
1220 if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
1221 my_friendly_abort (105);
1222 offset = BINFO_OFFSET (vb);
1223 rval = nval;
1224 }
1225 }
1226 }
1227 if (rval == NULL_TREE)
1228 return rval;
1229 return size_binop (PLUS_EXPR, offset, BINFO_OFFSET (rval));
1230 }
1231
1232 void
1233 debug_binfo (elem)
1234 tree elem;
1235 {
1236 int i;
1237 tree virtuals;
1238
1239 fprintf (stderr, "type \"%s\"; offset = %d\n",
1240 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1241 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1242 fprintf (stderr, "vtable type:\n");
1243 debug_tree (BINFO_TYPE (elem));
1244 if (BINFO_VTABLE (elem))
1245 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1246 else
1247 fprintf (stderr, "no vtable decl yet\n");
1248 fprintf (stderr, "virtuals:\n");
1249 virtuals = BINFO_VIRTUALS (elem);
1250 if (virtuals != 0)
1251 {
1252 virtuals = TREE_CHAIN (virtuals);
1253 if (flag_dossier)
1254 virtuals = TREE_CHAIN (virtuals);
1255 }
1256 i = 1;
1257 while (virtuals)
1258 {
1259 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1260 fprintf (stderr, "%s [%d =? %d]\n",
1261 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1262 i, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1263 virtuals = TREE_CHAIN (virtuals);
1264 i += 1;
1265 }
1266 }
1267
1268 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1269 We expect a null pointer to mark the end of the chain.
1270 This is the Lisp primitive `length'. */
1271
1272 int
1273 decl_list_length (t)
1274 tree t;
1275 {
1276 register tree tail;
1277 register int len = 0;
1278
1279 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1280 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1281 for (tail = t; tail; tail = DECL_CHAIN (tail))
1282 len++;
1283
1284 return len;
1285 }
1286
1287 int
1288 count_functions (t)
1289 tree t;
1290 {
1291 if (TREE_CODE (t) == FUNCTION_DECL)
1292 return 1;
1293
1294 return decl_list_length (TREE_VALUE (t));
1295 }
1296
1297 /* Like value_member, but for DECL_CHAINs. */
1298 tree
1299 decl_value_member (elem, list)
1300 tree elem, list;
1301 {
1302 while (list)
1303 {
1304 if (elem == list)
1305 return list;
1306 list = DECL_CHAIN (list);
1307 }
1308 return NULL_TREE;
1309 }
1310
1311 int
1312 is_overloaded_fn (x)
1313 tree x;
1314 {
1315 if (TREE_CODE (x) == FUNCTION_DECL)
1316 return 1;
1317
1318 if (TREE_CODE (x) == TREE_LIST
1319 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1320 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1321 return 1;
1322
1323 return 0;
1324 }
1325
1326 int
1327 really_overloaded_fn (x)
1328 tree x;
1329 {
1330 if (TREE_CODE (x) == TREE_LIST
1331 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1332 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1333 return 1;
1334
1335 return 0;
1336 }
1337
1338 tree
1339 get_first_fn (from)
1340 tree from;
1341 {
1342 if (TREE_CODE (from) == FUNCTION_DECL)
1343 return from;
1344
1345 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1346
1347 return TREE_VALUE (from);
1348 }
1349
1350 tree
1351 fnaddr_from_vtable_entry (entry)
1352 tree entry;
1353 {
1354 if (flag_vtable_thunks)
1355 {
1356 tree func = entry;
1357 if (TREE_CODE (func) == ADDR_EXPR)
1358 func = TREE_OPERAND (func, 0);
1359 if (TREE_CODE (func) == THUNK_DECL)
1360 return DECL_INITIAL (func);
1361 else
1362 return entry;
1363 }
1364 else
1365 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
1366 }
1367
1368 void
1369 set_fnaddr_from_vtable_entry (entry, value)
1370 tree entry, value;
1371 {
1372 if (flag_vtable_thunks)
1373 abort ();
1374 else
1375 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value;
1376 }
1377
1378 tree
1379 function_arg_chain (t)
1380 tree t;
1381 {
1382 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1383 }
1384
1385 int
1386 promotes_to_aggr_type (t, code)
1387 tree t;
1388 enum tree_code code;
1389 {
1390 if (TREE_CODE (t) == code)
1391 t = TREE_TYPE (t);
1392 return IS_AGGR_TYPE (t);
1393 }
1394
1395 int
1396 is_aggr_type_2 (t1, t2)
1397 tree t1, t2;
1398 {
1399 if (TREE_CODE (t1) != TREE_CODE (t2))
1400 return 0;
1401 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1402 }
1403
1404 /* Give message using types TYPE1 and TYPE2 as arguments.
1405 PFN is the function which will print the message;
1406 S is the format string for PFN to use. */
1407 void
1408 message_2_types (pfn, s, type1, type2)
1409 void (*pfn) ();
1410 char *s;
1411 tree type1, type2;
1412 {
1413 tree name1 = TYPE_NAME (type1);
1414 tree name2 = TYPE_NAME (type2);
1415 if (TREE_CODE (name1) == TYPE_DECL)
1416 name1 = DECL_NAME (name1);
1417 if (TREE_CODE (name2) == TYPE_DECL)
1418 name2 = DECL_NAME (name2);
1419 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1420 }
1421 \f
1422 #define PRINT_RING_SIZE 4
1423
1424 char *
1425 lang_printable_name (decl)
1426 tree decl;
1427 {
1428 static tree decl_ring[PRINT_RING_SIZE];
1429 static char *print_ring[PRINT_RING_SIZE];
1430 static int ring_counter;
1431 int i;
1432
1433 /* Only cache functions. */
1434 if (TREE_CODE (decl) != FUNCTION_DECL
1435 || DECL_LANG_SPECIFIC (decl) == 0)
1436 return decl_as_string (decl, 1);
1437
1438 /* See if this print name is lying around. */
1439 for (i = 0; i < PRINT_RING_SIZE; i++)
1440 if (decl_ring[i] == decl)
1441 /* yes, so return it. */
1442 return print_ring[i];
1443
1444 if (++ring_counter == PRINT_RING_SIZE)
1445 ring_counter = 0;
1446
1447 if (current_function_decl != NULL_TREE)
1448 {
1449 if (decl_ring[ring_counter] == current_function_decl)
1450 ring_counter += 1;
1451 if (ring_counter == PRINT_RING_SIZE)
1452 ring_counter = 0;
1453 if (decl_ring[ring_counter] == current_function_decl)
1454 my_friendly_abort (106);
1455 }
1456
1457 if (print_ring[ring_counter])
1458 free (print_ring[ring_counter]);
1459
1460 {
1461 int print_ret_type_p
1462 = (!DECL_CONSTRUCTOR_P (decl)
1463 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1464
1465 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1466 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1467 strcpy (print_ring[ring_counter], name);
1468 decl_ring[ring_counter] = decl;
1469 }
1470 return print_ring[ring_counter];
1471 }
1472 \f
1473 /* Comparison function for sorting identifiers in RAISES lists.
1474 Note that because IDENTIFIER_NODEs are unique, we can sort
1475 them by address, saving an indirection. */
1476 static int
1477 id_cmp (p1, p2)
1478 tree *p1, *p2;
1479 {
1480 return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2);
1481 }
1482
1483 /* Build the FUNCTION_TYPE or METHOD_TYPE which may raise exceptions
1484 listed in RAISES. */
1485 tree
1486 build_exception_variant (ctype, type, raises)
1487 tree ctype, type;
1488 tree raises;
1489 {
1490 int i;
1491 tree v = TYPE_MAIN_VARIANT (type);
1492 tree t, t2, cname;
1493 tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
1494 int constp = TYPE_READONLY (type);
1495 int volatilep = TYPE_VOLATILE (type);
1496
1497 for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
1498 {
1499 if (TYPE_READONLY (v) != constp
1500 || TYPE_VOLATILE (v) != volatilep)
1501 continue;
1502
1503 t = raises;
1504 t2 = TYPE_RAISES_EXCEPTIONS (v);
1505 while (t && t2)
1506 {
1507 if (TREE_TYPE (t) == TREE_TYPE (t2))
1508 {
1509 t = TREE_CHAIN (t);
1510 t2 = TREE_CHAIN (t2);
1511 }
1512 else break;
1513 }
1514 if (t || t2)
1515 continue;
1516 /* List of exceptions raised matches previously found list.
1517
1518 @@ Nice to free up storage used in consing up the
1519 @@ list of exceptions raised. */
1520 return v;
1521 }
1522
1523 /* Need to build a new variant. */
1524 v = copy_node (type);
1525 TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
1526 TYPE_NEXT_VARIANT (type) = v;
1527 if (raises && ! TREE_PERMANENT (raises))
1528 {
1529 push_obstacks_nochange ();
1530 end_temporary_allocation ();
1531 raises = copy_list (raises);
1532 pop_obstacks ();
1533 }
1534 TYPE_RAISES_EXCEPTIONS (v) = raises;
1535 return v;
1536 }
1537
1538 /* Subroutine of copy_to_permanent
1539
1540 Assuming T is a node built bottom-up, make it all exist on the
1541 permanent obstack, if it is not permanent already. */
1542 static tree
1543 make_deep_copy (t)
1544 tree t;
1545 {
1546 enum tree_code code;
1547
1548 if (t == NULL_TREE || TREE_PERMANENT (t))
1549 return t;
1550
1551 switch (code = TREE_CODE (t))
1552 {
1553 case ERROR_MARK:
1554 return error_mark_node;
1555
1556 case VAR_DECL:
1557 case FUNCTION_DECL:
1558 case CONST_DECL:
1559 break;
1560
1561 case PARM_DECL:
1562 {
1563 tree chain = TREE_CHAIN (t);
1564 t = copy_node (t);
1565 TREE_CHAIN (t) = make_deep_copy (chain);
1566 TREE_TYPE (t) = make_deep_copy (TREE_TYPE (t));
1567 DECL_INITIAL (t) = make_deep_copy (DECL_INITIAL (t));
1568 DECL_SIZE (t) = make_deep_copy (DECL_SIZE (t));
1569 return t;
1570 }
1571
1572 case TREE_LIST:
1573 {
1574 tree chain = TREE_CHAIN (t);
1575 t = copy_node (t);
1576 TREE_PURPOSE (t) = make_deep_copy (TREE_PURPOSE (t));
1577 TREE_VALUE (t) = make_deep_copy (TREE_VALUE (t));
1578 TREE_CHAIN (t) = make_deep_copy (chain);
1579 return t;
1580 }
1581
1582 case TREE_VEC:
1583 {
1584 int len = TREE_VEC_LENGTH (t);
1585
1586 t = copy_node (t);
1587 while (len--)
1588 TREE_VEC_ELT (t, len) = make_deep_copy (TREE_VEC_ELT (t, len));
1589 return t;
1590 }
1591
1592 case INTEGER_CST:
1593 case REAL_CST:
1594 case STRING_CST:
1595 return copy_node (t);
1596
1597 case COND_EXPR:
1598 case TARGET_EXPR:
1599 case NEW_EXPR:
1600 t = copy_node (t);
1601 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1602 TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
1603 TREE_OPERAND (t, 2) = make_deep_copy (TREE_OPERAND (t, 2));
1604 return t;
1605
1606 case SAVE_EXPR:
1607 t = copy_node (t);
1608 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1609 return t;
1610
1611 case MODIFY_EXPR:
1612 case PLUS_EXPR:
1613 case MINUS_EXPR:
1614 case MULT_EXPR:
1615 case TRUNC_DIV_EXPR:
1616 case TRUNC_MOD_EXPR:
1617 case MIN_EXPR:
1618 case MAX_EXPR:
1619 case LSHIFT_EXPR:
1620 case RSHIFT_EXPR:
1621 case BIT_IOR_EXPR:
1622 case BIT_XOR_EXPR:
1623 case BIT_AND_EXPR:
1624 case BIT_ANDTC_EXPR:
1625 case TRUTH_ANDIF_EXPR:
1626 case TRUTH_ORIF_EXPR:
1627 case LT_EXPR:
1628 case LE_EXPR:
1629 case GT_EXPR:
1630 case GE_EXPR:
1631 case EQ_EXPR:
1632 case NE_EXPR:
1633 case CEIL_DIV_EXPR:
1634 case FLOOR_DIV_EXPR:
1635 case ROUND_DIV_EXPR:
1636 case CEIL_MOD_EXPR:
1637 case FLOOR_MOD_EXPR:
1638 case ROUND_MOD_EXPR:
1639 case COMPOUND_EXPR:
1640 case PREDECREMENT_EXPR:
1641 case PREINCREMENT_EXPR:
1642 case POSTDECREMENT_EXPR:
1643 case POSTINCREMENT_EXPR:
1644 case CALL_EXPR:
1645 t = copy_node (t);
1646 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1647 TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
1648 return t;
1649
1650 case CONVERT_EXPR:
1651 case ADDR_EXPR:
1652 case INDIRECT_REF:
1653 case NEGATE_EXPR:
1654 case BIT_NOT_EXPR:
1655 case TRUTH_NOT_EXPR:
1656 case NOP_EXPR:
1657 case COMPONENT_REF:
1658 t = copy_node (t);
1659 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1660 return t;
1661
1662 /* This list is incomplete, but should suffice for now.
1663 It is very important that `sorry' does not call
1664 `report_error_function'. That could cause an infinite loop. */
1665 default:
1666 sorry ("initializer contains unrecognized tree code");
1667 return error_mark_node;
1668
1669 }
1670 my_friendly_abort (107);
1671 /* NOTREACHED */
1672 return NULL_TREE;
1673 }
1674
1675 /* Assuming T is a node built bottom-up, make it all exist on
1676 the permanent obstack, if it is not permanent already. */
1677 tree
1678 copy_to_permanent (t)
1679 tree t;
1680 {
1681 register struct obstack *ambient_obstack = current_obstack;
1682 register struct obstack *ambient_saveable_obstack = saveable_obstack;
1683
1684 if (t == NULL_TREE || TREE_PERMANENT (t))
1685 return t;
1686
1687 saveable_obstack = &permanent_obstack;
1688 current_obstack = saveable_obstack;
1689
1690 t = make_deep_copy (t);
1691
1692 current_obstack = ambient_obstack;
1693 saveable_obstack = ambient_saveable_obstack;
1694
1695 return t;
1696 }
1697
1698 void
1699 print_lang_statistics ()
1700 {
1701 extern struct obstack maybepermanent_obstack;
1702 print_obstack_statistics ("class_obstack", &class_obstack);
1703 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1704 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1705 print_search_statistics ();
1706 print_class_statistics ();
1707 }
1708
1709 /* This is used by the `assert' macro. It is provided in libgcc.a,
1710 which `cc' doesn't know how to link. Note that the C++ front-end
1711 no longer actually uses the `assert' macro (instead, it calls
1712 my_friendly_assert). But all of the back-end files still need this. */
1713 void
1714 __eprintf (string, expression, line, filename)
1715 #ifdef __STDC__
1716 const char *string;
1717 const char *expression;
1718 unsigned line;
1719 const char *filename;
1720 #else
1721 char *string;
1722 char *expression;
1723 unsigned line;
1724 char *filename;
1725 #endif
1726 {
1727 fprintf (stderr, string, expression, line, filename);
1728 fflush (stderr);
1729 abort ();
1730 }
1731
1732 /* Return, as an INTEGER_CST node, the number of elements for
1733 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1734
1735 tree
1736 array_type_nelts_top (type)
1737 tree type;
1738 {
1739 return fold (build (PLUS_EXPR, integer_type_node,
1740 array_type_nelts (type),
1741 integer_one_node));
1742 }
1743
1744 /* Return, as an INTEGER_CST node, the number of elements for
1745 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1746 ARRAY_TYPEs that are clumped together. */
1747
1748 tree
1749 array_type_nelts_total (type)
1750 tree type;
1751 {
1752 tree sz = array_type_nelts_top (type);
1753 type = TREE_TYPE (type);
1754 while (TREE_CODE (type) == ARRAY_TYPE)
1755 {
1756 tree n = array_type_nelts_top (type);
1757 sz = fold (build (MULT_EXPR, integer_type_node, sz, n));
1758 type = TREE_TYPE (type);
1759 }
1760 return sz;
1761 }
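/* For example, for a C++ array type `int[3][5]', array_type_nelts_top
   yields 3 and array_type_nelts_total yields 15 (both as INTEGER_CSTs),
   assuming the usual zero-based index domains. */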