/* Language-dependent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.
   Hacked by Michael Tiemann (tiemann@cygnus.com)

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include "config.h"
#include <stdio.h>
#include "obstack.h"
#include "tree.h"
#include "cp-tree.h"
#include "flags.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Return nonzero if REF is an lvalue valid for this language.
   Lvalues can be assigned, unless they have TREE_READONLY.
   Lvalues can have their address taken, unless they have DECL_REGISTER.  */

int
lvalue_p (ref)
     tree ref;
{
  register enum tree_code code = TREE_CODE (ref);

  if (language_lvalue_valid (ref))
    {
      if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
        return 1;

      switch (code)
        {
          /* preincrements and predecrements are valid lvals, provided
             what they refer to are valid lvals.  */
        case PREINCREMENT_EXPR:
        case PREDECREMENT_EXPR:
        case COMPONENT_REF:
        case SAVE_EXPR:
          return lvalue_p (TREE_OPERAND (ref, 0));

        case STRING_CST:
          return 1;

        case VAR_DECL:
          if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
              && DECL_LANG_SPECIFIC (ref)
              && DECL_IN_AGGR_P (ref))
            return 0;
        case INDIRECT_REF:
        case ARRAY_REF:
        case PARM_DECL:
        case RESULT_DECL:
        case ERROR_MARK:
          if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
              && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
            return 1;
          break;

        case TARGET_EXPR:
        case WITH_CLEANUP_EXPR:
          return 1;

        case CALL_EXPR:
          /* unary_complex_lvalue knows how to deal with this case.  */
          if (TREE_ADDRESSABLE (TREE_TYPE (ref)))
            return 1;
          break;

          /* A currently unresolved scope ref.  */
        case SCOPE_REF:
          my_friendly_abort (103);
        case OFFSET_REF:
          if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
            return 1;
          return lvalue_p (TREE_OPERAND (ref, 0))
            && lvalue_p (TREE_OPERAND (ref, 1));
          break;

        case COND_EXPR:
          return (lvalue_p (TREE_OPERAND (ref, 1))
                  && lvalue_p (TREE_OPERAND (ref, 2)));

        case MODIFY_EXPR:
          return 1;

        case COMPOUND_EXPR:
          return lvalue_p (TREE_OPERAND (ref, 1));
        }
    }
  return 0;
}

/* Return nonzero if REF is an lvalue valid for this language;
   otherwise, print an error message and return zero.  */

int
lvalue_or_else (ref, string)
     tree ref;
     char *string;
{
  int win = lvalue_p (ref);
  if (! win)
    error ("non-lvalue in %s", string);
  return win;
}

/* INIT is a CALL_EXPR which needs info about its target.
   TYPE is the type that this initialization should appear to have.

   Build an encapsulation of the initialization to perform
   and return it so that it can be processed by language-independent
   and language-specific expression expanders.

   If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
   Otherwise, cleanups are not built here.  For example, when building
   an initialization for a stack slot, since the called function handles
   the cleanup, we would not want to do it here.  */
tree
build_cplus_new (type, init, with_cleanup_p)
     tree type;
     tree init;
     int with_cleanup_p;
{
  tree slot = build (VAR_DECL, type);
  tree rval = build (NEW_EXPR, type,
                     TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
  TREE_SIDE_EFFECTS (rval) = 1;
  TREE_ADDRESSABLE (rval) = 1;
  rval = build (TARGET_EXPR, type, slot, rval, 0);
  TREE_SIDE_EFFECTS (rval) = 1;
  TREE_ADDRESSABLE (rval) = 1;

  if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
    {
      TREE_OPERAND (rval, 2) = error_mark_node;
      rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
                    build_delete (TYPE_POINTER_TO (type),
                                  build_unary_op (ADDR_EXPR, slot, 0),
                                  integer_two_node,
                                  LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0));
      TREE_SIDE_EFFECTS (rval) = 1;
      TREE_ADDRESSABLE (rval) = 1;
    }
  return rval;
}

/* Recursively search EXP for CALL_EXPRs that need cleanups and replace
   these CALL_EXPRs with tree nodes that will perform the cleanups.  */

tree
break_out_cleanups (exp)
     tree exp;
{
  tree tmp = exp;

  if (TREE_CODE (tmp) == CALL_EXPR
      && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
    return build_cplus_new (TREE_TYPE (tmp), tmp, 1);

  while (TREE_CODE (tmp) == NOP_EXPR
         || TREE_CODE (tmp) == CONVERT_EXPR
         || TREE_CODE (tmp) == NON_LVALUE_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
          && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
        {
          TREE_OPERAND (tmp, 0)
            = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
                               TREE_OPERAND (tmp, 0), 1);
          break;
        }
      else
        tmp = TREE_OPERAND (tmp, 0);
    }
  return exp;
}

/* Recursively perform a preorder search of EXP for CALL_EXPRs, making
   copies where they are found.  Returns a deep copy of all nodes that
   transitively contain CALL_EXPRs.  */

tree
break_out_calls (exp)
     tree exp;
{
  register tree t1, t2;
  register enum tree_code code;
  register int changed = 0;
  register int i;

  if (exp == NULL_TREE)
    return exp;

  code = TREE_CODE (exp);

  if (code == CALL_EXPR)
    return copy_node (exp);

  /* Don't try to defeat a save_expr, as it should only be done once.  */
  if (code == SAVE_EXPR)
    return exp;

  switch (TREE_CODE_CLASS (code))
    {
    default:
      abort ();

    case 'c':  /* a constant */
    case 't':  /* a type node */
    case 'x':  /* something random, like an identifier or an ERROR_MARK.  */
      return exp;

    case 'd':  /* A decl node */
      t1 = break_out_calls (DECL_INITIAL (exp));
      if (t1 != DECL_INITIAL (exp))
        {
          exp = copy_node (exp);
          DECL_INITIAL (exp) = t1;
        }
      return exp;

    case 'b':  /* A block node */
      {
        /* Don't know how to handle these correctly yet.  Must do a
           break_out_calls on all DECL_INITIAL values for local variables,
           and also break_out_calls on all sub-blocks and sub-statements.  */
        abort ();
      }
      return exp;

    case 'e':  /* an expression */
    case 'r':  /* a reference */
    case 's':  /* an expression with side effects */
      for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
        {
          t1 = break_out_calls (TREE_OPERAND (exp, i));
          if (t1 != TREE_OPERAND (exp, i))
            {
              exp = copy_node (exp);
              TREE_OPERAND (exp, i) = t1;
            }
        }
      return exp;

    case '<':  /* a comparison expression */
    case '2':  /* a binary arithmetic expression */
      t2 = break_out_calls (TREE_OPERAND (exp, 1));
      if (t2 != TREE_OPERAND (exp, 1))
        changed = 1;
    case '1':  /* a unary arithmetic expression */
      t1 = break_out_calls (TREE_OPERAND (exp, 0));
      if (t1 != TREE_OPERAND (exp, 0))
        changed = 1;
      if (changed)
        {
          if (tree_code_length[(int) code] == 1)
            return build1 (code, TREE_TYPE (exp), t1);
          else
            return build (code, TREE_TYPE (exp), t1, t2);
        }
      return exp;
    }

}
\f
extern struct obstack *current_obstack;
extern struct obstack permanent_obstack, class_obstack;
extern struct obstack *saveable_obstack;

/* Here is how primitive or already-canonicalized types' hash
   codes are made.  MUST BE CONSISTENT WITH tree.c !!! */
#define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)

/* Construct, lay out and return the type of methods belonging to class
   BASETYPE and whose arguments are described by ARGTYPES and whose values
   are described by RETTYPE.  If each type exists already, reuse it.  */
tree
build_cplus_method_type (basetype, rettype, argtypes)
     tree basetype, rettype, argtypes;
{
  register tree t;
  tree ptype;
  int hashcode;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  if (IS_SIGNATURE (basetype))
    ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
                                          TYPE_READONLY (basetype),
                                          TYPE_VOLATILE (basetype));
  else
    {
      ptype = build_pointer_type (basetype);
      ptype = build_type_variant (ptype, 1, 0);
    }
  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  */

  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;
  TREE_SIDE_EFFECTS (argtypes) = 1;  /* Mark first argtype as "artificial".  */

  /* If we already have such a type, use the old one and free this one.
     Note that it also frees up the above cons cell if found.  */
  hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
  t = type_hash_canon (hashcode, t);

  if (TYPE_SIZE (t) == 0)
    layout_type (t);

  return t;
}

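/* Construct, lay out and return the type of static member functions of
   class BASETYPE whose arguments are described by ARGTYPES and whose
   values are described by RETTYPE.  If an identical type already exists,
   reuse it.  */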
tree
build_cplus_staticfn_type (basetype, rettype, argtypes)
     tree basetype, rettype, argtypes;
{
  register tree t;
  int hashcode;

  /* Make a node of the sort we want.  */
  t = make_node (FUNCTION_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;

  /* Unlike an ordinary method type, a static member function takes no
     hidden `this' argument; record the argument types exactly as given.  */

  TYPE_ARG_TYPES (t) = argtypes;

  /* If we already have such a type, use the old one and free this one.  */
  hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
  t = type_hash_canon (hashcode, t);

  if (TYPE_SIZE (t) == 0)
    layout_type (t);

  return t;
}

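/* Build an ARRAY_TYPE of ELT_TYPE elements indexed by INDEX_TYPE.  The
   array type is allocated on the permanent obstack when both ELT_TYPE and
   INDEX_TYPE are permanent, and it inherits the constructor and destructor
   needs of ELT_TYPE.  */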
tree
build_cplus_array_type (elt_type, index_type)
     tree elt_type;
     tree index_type;
{
  register struct obstack *ambient_obstack = current_obstack;
  register struct obstack *ambient_saveable_obstack = saveable_obstack;
  tree t;

  /* We need a new one.  If both ELT_TYPE and INDEX_TYPE are permanent,
     make this permanent too.  */
  if (TREE_PERMANENT (elt_type)
      && (index_type == 0 || TREE_PERMANENT (index_type)))
    {
      current_obstack = &permanent_obstack;
      saveable_obstack = &permanent_obstack;
    }

  t = build_array_type (elt_type, index_type);

  /* Push these needs up so that initialization takes place
     more easily.  */
  TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
  TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
  current_obstack = ambient_obstack;
  saveable_obstack = ambient_saveable_obstack;
  return t;
}
\f
/* Add OFFSET to all base types of BINFO.

   OFFSET, which is a type offset, is a number of bytes.

   Note that we don't have to worry about having two paths to the
   same base type, since this type owns its association list.  */
void
propagate_binfo_offsets (binfo, offset)
     tree binfo;
     tree offset;
{
  tree binfos = BINFO_BASETYPES (binfo);
  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  for (i = 0; i < n_baselinks; /* note increment is done in the loop.  */)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, i);

      if (TREE_VIA_VIRTUAL (base_binfo))
        i += 1;
      else
        {
          int j;
          tree base_binfos = BINFO_BASETYPES (base_binfo);
          tree delta;

          for (j = i+1; j < n_baselinks; j++)
            if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
              {
                /* The next basetype offset must take into account the space
                   between the classes, not just the size of each class.  */
                delta = size_binop (MINUS_EXPR,
                                    BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
                                    BINFO_OFFSET (base_binfo));
                break;
              }

#if 0
          if (BINFO_OFFSET_ZEROP (base_binfo))
            BINFO_OFFSET (base_binfo) = offset;
          else
            BINFO_OFFSET (base_binfo)
              = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
#else
          BINFO_OFFSET (base_binfo) = offset;
#endif
          if (base_binfos)
            {
              int k;
              tree chain = NULL_TREE;

              /* Now unshare the structure beneath BASE_BINFO.  */
              for (k = TREE_VEC_LENGTH (base_binfos)-1;
                   k >= 0; k--)
                {
                  tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
                  if (! TREE_VIA_VIRTUAL (base_base_binfo))
                    TREE_VEC_ELT (base_binfos, k)
                      = make_binfo (BINFO_OFFSET (base_base_binfo),
                                    base_base_binfo,
                                    BINFO_VTABLE (base_base_binfo),
                                    BINFO_VIRTUALS (base_base_binfo),
                                    chain);
                  chain = TREE_VEC_ELT (base_binfos, k);
                  TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
                  TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
                }
              /* Now propagate the offset to the base types.  */
              propagate_binfo_offsets (base_binfo, offset);
            }

          /* Go to our next class that counts for offset propagation.  */
          i = j;
          if (i < n_baselinks)
            offset = size_binop (PLUS_EXPR, offset, delta);
        }
    }
}

/* Compute the actual offsets that our virtual base classes
   will have *for this type*.  This must be performed after
   the fields are laid out, since virtual baseclasses must
   be laid out at the end of the record.

   Returns the maximum number of virtual functions any of the virtual
   baseclasses provide.  */
int
layout_vbasetypes (rec, max)
     tree rec;
     int max;
{
  /* Get all the virtual base types that this type uses.
     The TREE_VALUE slot holds the virtual baseclass type.  */
  tree vbase_types = get_vbase_types (rec);

#ifdef STRUCTURE_SIZE_BOUNDARY
  unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
#else
  unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
#endif
  int desired_align;

  /* Record size so far is CONST_SIZE + VAR_SIZE bits,
     where CONST_SIZE is an integer
     and VAR_SIZE is a tree expression.
     If VAR_SIZE is null, the size is just CONST_SIZE.
     Naturally we try to avoid using VAR_SIZE.  */
  register unsigned const_size = 0;
  register tree var_size = 0;
  int nonvirtual_const_size;
  tree nonvirtual_var_size;

  CLASSTYPE_VBASECLASSES (rec) = vbase_types;

  if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
    const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
  else
    var_size = TYPE_SIZE (rec);

  nonvirtual_const_size = const_size;
  nonvirtual_var_size = var_size;

  while (vbase_types)
    {
      tree basetype = BINFO_TYPE (vbase_types);
      tree offset;

      desired_align = TYPE_ALIGN (basetype);
      record_align = MAX (record_align, desired_align);

      if (const_size == 0)
        offset = integer_zero_node;
      else
        {
          /* Give each virtual base type the alignment it wants.  */
          const_size = CEIL (const_size, TYPE_ALIGN (basetype))
            * TYPE_ALIGN (basetype);
          offset = size_int (CEIL (const_size, BITS_PER_UNIT));
        }

      if (CLASSTYPE_VSIZE (basetype) > max)
        max = CLASSTYPE_VSIZE (basetype);
      BINFO_OFFSET (vbase_types) = offset;

      if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
        const_size += MAX (BITS_PER_UNIT,
                           TREE_INT_CST_LOW (TYPE_SIZE (basetype))
                           - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
      else if (var_size == 0)
        var_size = TYPE_SIZE (basetype);
      else
        var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));

      vbase_types = TREE_CHAIN (vbase_types);
    }

  /* Set the alignment in the complete type.  We don't set CLASSTYPE_ALIGN
     here, as that is for this class, without any virtual base classes.  */
  TYPE_ALIGN (rec) = record_align;
  if (const_size != nonvirtual_const_size)
    {
      CLASSTYPE_VBASE_SIZE (rec)
        = size_int (const_size - nonvirtual_const_size);
      TYPE_SIZE (rec) = size_int (const_size);
    }

  /* Now propagate offset information throughout the lattice
     under the vbase type.  */
  for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
       vbase_types = TREE_CHAIN (vbase_types))
    {
      tree base_binfos = BINFO_BASETYPES (vbase_types);

      if (base_binfos)
        {
          tree chain = NULL_TREE;
          int j;
          /* Now unshare the structure beneath this virtual base binfo.  */

          for (j = TREE_VEC_LENGTH (base_binfos)-1;
               j >= 0; j--)
            {
              tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
              if (! TREE_VIA_VIRTUAL (base_base_binfo))
                TREE_VEC_ELT (base_binfos, j)
                  = make_binfo (BINFO_OFFSET (base_base_binfo),
                                base_base_binfo,
                                BINFO_VTABLE (base_base_binfo),
                                BINFO_VIRTUALS (base_base_binfo),
                                chain);
              chain = TREE_VEC_ELT (base_binfos, j);
              TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
              TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
            }

          propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
        }
    }

  return max;
}

/* Lay out the base types of a record type, REC.
   Tentatively set the size and alignment of REC
   according to the base types alone.

   Offsets for immediate nonvirtual baseclasses are also computed here.

   TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
   creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.

   Returns list of virtual base classes in a FIELD_DECL chain.  */
tree
layout_basetypes (rec, binfos)
     tree rec, binfos;
{
  /* Chain to hold all the new FIELD_DECLs which point at virtual
     base classes.  */
  tree vbase_decls = NULL_TREE;

#ifdef STRUCTURE_SIZE_BOUNDARY
  unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
#else
  unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
#endif

  /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
     an integer and VAR_SIZE is a tree expression.  If VAR_SIZE is null,
     the size is just CONST_SIZE.  Naturally we try to avoid using
     VAR_SIZE.  And so far, we've been successful.  */
#if 0
  register tree var_size = 0;
#endif

  register unsigned const_size = 0;
  int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  /* Handle basetypes almost like fields, but record their
     offsets differently.  */

  for (i = 0; i < n_baseclasses; i++)
    {
      int inc, desired_align, int_vbase_size;
      register tree base_binfo = TREE_VEC_ELT (binfos, i);
      register tree basetype = BINFO_TYPE (base_binfo);
      tree decl, offset;

      if (TYPE_SIZE (basetype) == 0)
        {
#if 0
          /* This error is now reported in xref_tag, thus giving better
             location information.  */
          error_with_aggr_type (base_binfo,
                                "base class `%s' has incomplete type");

          TREE_VIA_PUBLIC (base_binfo) = 1;
          TREE_VIA_PROTECTED (base_binfo) = 0;
          TREE_VIA_VIRTUAL (base_binfo) = 0;

          /* Should handle this better so that

             class A;
             class B: private A { virtual void F(); };

             does not dump core when compiled.  */
          my_friendly_abort (121);
#endif
          continue;
        }

      /* All basetypes are recorded in the association list of the
         derived type.  */

      if (TREE_VIA_VIRTUAL (base_binfo))
        {
          int j;
          char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
                                       + sizeof (VBASE_NAME) + 1);

          /* The offset for a virtual base class is only used in computing
             virtual function tables and for initializing virtual base
             pointers.  It is built once `get_vbase_types' is called.  */

          /* If this basetype can come from another vbase pointer
             without an additional indirection, we will share
             that pointer.  If an indirection is involved, we
             make our own pointer.  */
          for (j = 0; j < n_baseclasses; j++)
            {
              tree other_base_binfo = TREE_VEC_ELT (binfos, j);
              if (! TREE_VIA_VIRTUAL (other_base_binfo)
                  && binfo_member (basetype,
                                   CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
                goto got_it;
            }
          sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
          decl = build_lang_decl (FIELD_DECL, get_identifier (name),
                                  build_pointer_type (basetype));
          /* If you change any of the below, take a look at all the
             other VFIELD_BASEs and VTABLE_BASEs in the code, and change
             them too.  */
          DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
          DECL_VIRTUAL_P (decl) = 1;
          DECL_FIELD_CONTEXT (decl) = rec;
          DECL_CLASS_CONTEXT (decl) = rec;
          DECL_FCONTEXT (decl) = basetype;
          DECL_FIELD_SIZE (decl) = 0;
          DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
          TREE_CHAIN (decl) = vbase_decls;
          BINFO_VPTR_FIELD (base_binfo) = decl;
          vbase_decls = decl;

          if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
              && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
            {
              warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
                                 "destructor `%s' non-virtual");
              warning ("in inheritance relationship `%s: virtual %s'",
                       TYPE_NAME_STRING (rec),
                       TYPE_NAME_STRING (basetype));
            }
        got_it:
          /* The space this decl occupies has already been accounted for.  */
          continue;
        }

      if (const_size == 0)
        offset = integer_zero_node;
      else
        {
          /* Give each base type the alignment it wants.  */
          const_size = CEIL (const_size, TYPE_ALIGN (basetype))
            * TYPE_ALIGN (basetype);
          offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);

#if 0
          /* bpk: Disabled this check until someone is willing to
             claim it as theirs and explain exactly what circumstances
             warrant the warning.  */
          if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
              && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
            {
              warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
                                 "destructor `%s' non-virtual");
              warning ("in inheritance relationship `%s:%s %s'",
                       TYPE_NAME_STRING (rec),
                       TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
                       TYPE_NAME_STRING (basetype));
            }
#endif
        }
      BINFO_OFFSET (base_binfo) = offset;
      if (CLASSTYPE_VSIZE (basetype))
        {
          BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
          BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
        }
      TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
      TYPE_BINFO (rec) = base_binfo;

      /* Add only the amount of storage not present in
         the virtual baseclasses.  */

      int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
      if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
        {
          inc = MAX (record_align,
                     (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
                      - int_vbase_size));

          /* Record must have at least as much alignment as any field.  */
          desired_align = TYPE_ALIGN (basetype);
          record_align = MAX (record_align, desired_align);

          const_size += inc;
        }
    }

  if (const_size)
    CLASSTYPE_SIZE (rec) = size_int (const_size);
  else
    CLASSTYPE_SIZE (rec) = integer_zero_node;
  CLASSTYPE_ALIGN (rec) = record_align;

  return vbase_decls;
}
\f
/* Hashing of lists so that we don't make duplicates.
   The entry point is `list_hash_canon'.  */

/* Each hash table slot is a bucket containing a chain
   of these structures.  */

struct list_hash
{
  struct list_hash *next;   /* Next structure in the bucket.  */
  int hashcode;             /* Hash code of this list.  */
  tree list;                /* The list recorded here.  */
};

/* Now here is the hash table.  When recording a list, it is added
   to the slot whose index is the hash code mod the table size.
   Note that the hash table is used for several kinds of lists.
   While all these live in the same table, they are completely independent,
   and the hash code is computed differently for each of these.  */

#define TYPE_HASH_SIZE 59
struct list_hash *list_hash_table[TYPE_HASH_SIZE];

/* Compute a hash code for a list (chain of TREE_LIST nodes
   with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
   TREE_COMMON slots), by adding the hash codes of the individual entries.  */

int
list_hash (list)
     tree list;
{
  register int hashcode = 0;

  if (TREE_CHAIN (list))
    hashcode += TYPE_HASH (TREE_CHAIN (list));

  if (TREE_VALUE (list))
    hashcode += TYPE_HASH (TREE_VALUE (list));
  else
    hashcode += 1007;
  if (TREE_PURPOSE (list))
    hashcode += TYPE_HASH (TREE_PURPOSE (list));
  else
    hashcode += 1009;
  return hashcode;
}

/* Look in the list hash table for a list matching LIST, with hash code
   HASHCODE.  If one is found, return it.  Otherwise return 0.  */

tree
list_hash_lookup (hashcode, list)
     int hashcode;
     tree list;
{
  register struct list_hash *h;
  for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
    if (h->hashcode == hashcode
        && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
        && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
        && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
        && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
        && TREE_VALUE (h->list) == TREE_VALUE (list)
        && TREE_CHAIN (h->list) == TREE_CHAIN (list))
      {
        my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
        return h->list;
      }
  return 0;
}

/* Add an entry to the list-hash-table
   for a list LIST whose hash code is HASHCODE.  */

void
list_hash_add (hashcode, list)
     int hashcode;
     tree list;
{
  register struct list_hash *h;

  h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
  h->hashcode = hashcode;
  h->list = list;
  h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
  list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
}

/* Given LIST, and HASHCODE its hash code, return the canonical
   object for an identical list if one already exists.
   Otherwise, return LIST, and record it as the canonical object
   if it is a permanent object.

   To use this function, first create a list of the sort you want.
   Then compute its hash code from the fields of the list that
   make it different from other similar lists.
   Then call this function and use the value.
   This function frees the list you pass in if it is a duplicate.  */

/* Set to 1 to debug without canonicalization.  Never set by program.  */
static int debug_no_list_hash = 0;

tree
list_hash_canon (hashcode, list)
     int hashcode;
     tree list;
{
  tree t1;

  if (debug_no_list_hash)
    return list;

  t1 = list_hash_lookup (hashcode, list);
  if (t1 != 0)
    {
      obstack_free (&class_obstack, list);
      return t1;
    }

  /* If this is a new list, record it for later reuse.  */
  list_hash_add (hashcode, list);

  return list;
}

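/* Build a TREE_LIST node on the class obstack from PURPOSE, VALUE and
   CHAIN, set its VIA_PUBLIC, VIA_VIRTUAL and VIA_PROTECTED bits from the
   corresponding arguments, and return the canonical (hashed) copy.  */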
tree
hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
     int via_public, via_virtual, via_protected;
     tree purpose, value, chain;
{
  struct obstack *ambient_obstack = current_obstack;
  tree t;
  int hashcode;

  current_obstack = &class_obstack;
  t = tree_cons (purpose, value, chain);
  TREE_VIA_PUBLIC (t) = via_public;
  TREE_VIA_PROTECTED (t) = via_protected;
  TREE_VIA_VIRTUAL (t) = via_virtual;
  hashcode = list_hash (t);
  t = list_hash_canon (hashcode, t);
  current_obstack = ambient_obstack;
  return t;
}

/* Constructor for hashed lists.  */
tree
hash_tree_chain (value, chain)
     tree value, chain;
{
  struct obstack *ambient_obstack = current_obstack;
  tree t;
  int hashcode;

  current_obstack = &class_obstack;
  t = tree_cons (NULL_TREE, value, chain);
  hashcode = list_hash (t);
  t = list_hash_canon (hashcode, t);
  current_obstack = ambient_obstack;
  return t;
}

/* Similar, but used for concatenating two lists.  */
tree
hash_chainon (list1, list2)
     tree list1, list2;
{
  if (list2 == 0)
    return list1;
  if (list1 == 0)
    return list2;
  if (TREE_CHAIN (list1) == NULL_TREE)
    return hash_tree_chain (TREE_VALUE (list1), list2);
  return hash_tree_chain (TREE_VALUE (list1),
                          hash_chainon (TREE_CHAIN (list1), list2));
}

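/* Subroutine of get_decl_list and list_hash_lookup_or_cons: return a
   TREE_LIST whose only value is the IDENTIFIER_NODE VALUE, reusing a
   cached list for VALUE or for its class type where possible, or
   NULL_TREE if no such list can be used.  */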
static tree
get_identifier_list (value)
     tree value;
{
  tree list = IDENTIFIER_AS_LIST (value);
  if (list != NULL_TREE
      && (TREE_CODE (list) != TREE_LIST
          || TREE_VALUE (list) != value))
    list = NULL_TREE;
  else if (IDENTIFIER_HAS_TYPE_VALUE (value)
           && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
           && IDENTIFIER_TYPE_VALUE (value)
              == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
    {
      tree type = IDENTIFIER_TYPE_VALUE (value);

      if (TYPE_PTRMEMFUNC_P (type))
        list = NULL_TREE;
      else if (type == current_class_type)
        /* Don't mess up the constructor name.  */
        list = tree_cons (NULL_TREE, value, NULL_TREE);
      else
        {
          register tree id;
          /* This will return the correct thing for regular types,
             nested types, and templates.  Yay! */
          if (TYPE_NESTED_NAME (type))
            id = TYPE_NESTED_NAME (type);
          else
            id = TYPE_IDENTIFIER (type);

          if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
            CLASSTYPE_ID_AS_LIST (type)
              = perm_tree_cons (NULL_TREE, id, NULL_TREE);
          list = CLASSTYPE_ID_AS_LIST (type);
        }
    }
  return list;
}

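/* Return a one-element list whose value is VALUE, sharing a cached list
   when VALUE is an IDENTIFIER_NODE or an aggregate RECORD_TYPE, and
   otherwise building a fresh decl list.  */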
tree
get_decl_list (value)
     tree value;
{
  tree list = NULL_TREE;

  if (TREE_CODE (value) == IDENTIFIER_NODE)
    list = get_identifier_list (value);
  else if (TREE_CODE (value) == RECORD_TYPE
           && TYPE_LANG_SPECIFIC (value))
    list = CLASSTYPE_AS_LIST (value);

  if (list != NULL_TREE)
    {
      my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
      return list;
    }

  return build_decl_list (NULL_TREE, value);
}

/* Look in the list hash table for a list isomorphic to
   `build_tree_list (NULL_TREE, VALUE)'.
   If one is found, return it.  Otherwise, build one, record it in the
   hash table, and return it.  */

tree
list_hash_lookup_or_cons (value)
     tree value;
{
  register int hashcode = TYPE_HASH (value);
  register struct list_hash *h;
  struct obstack *ambient_obstack;
  tree list = NULL_TREE;

  if (TREE_CODE (value) == IDENTIFIER_NODE)
    list = get_identifier_list (value);
  else if (TREE_CODE (value) == TYPE_DECL
           && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
           && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
    list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
  else if (TREE_CODE (value) == RECORD_TYPE
           && TYPE_LANG_SPECIFIC (value))
    list = CLASSTYPE_AS_LIST (value);

  if (list != NULL_TREE)
    {
      my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
      return list;
    }

  if (debug_no_list_hash)
    return hash_tree_chain (value, NULL_TREE);

  for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
    if (h->hashcode == hashcode
        && TREE_VIA_VIRTUAL (h->list) == 0
        && TREE_VIA_PUBLIC (h->list) == 0
        && TREE_VIA_PROTECTED (h->list) == 0
        && TREE_PURPOSE (h->list) == 0
        && TREE_VALUE (h->list) == value)
      {
        my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
        my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
        return h->list;
      }

  ambient_obstack = current_obstack;
  current_obstack = &class_obstack;
  list = build_tree_list (NULL_TREE, value);
  list_hash_add (hashcode, list);
  current_obstack = ambient_obstack;
  return list;
}
\f
/* Build an association between TYPE and some parameters:

   OFFSET is the offset added to `this' to convert it to a pointer
   of type `TYPE *'

   BINFO is the base binfo to use, if we are deriving from one.  This
   is necessary, as we want specialized parent binfos from base
   classes, so that the VTABLE_NAMEs of bases are for the most derived
   type, instead of the simple type.

   VTABLE is the virtual function table with which to initialize
   sub-objects of type TYPE.

   VIRTUALS are the virtual functions sitting in VTABLE.

   CHAIN are more associations we must retain.  */

tree
make_binfo (offset, binfo, vtable, virtuals, chain)
     tree offset, binfo;
     tree vtable, virtuals;
     tree chain;
{
  tree new_binfo = make_tree_vec (6);
  tree type;

  if (TREE_CODE (binfo) == TREE_VEC)
    type = BINFO_TYPE (binfo);
  else
    {
      type = binfo;
      binfo = TYPE_BINFO (binfo);
    }

  TREE_CHAIN (new_binfo) = chain;
  if (chain)
    TREE_USED (new_binfo) = TREE_USED (chain);

  TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
  BINFO_OFFSET (new_binfo) = offset;
  BINFO_VTABLE (new_binfo) = vtable;
  BINFO_VIRTUALS (new_binfo) = virtuals;
  BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;

  if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
    BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
  return new_binfo;
}

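/* Copy the binfo chain LIST, clearing TREE_USED and unsharing the
   BINFO_BASETYPES vector of each element; return the new chain.  */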
tree
copy_binfo (list)
     tree list;
{
  tree binfo = copy_list (list);
  tree rval = binfo;
  while (binfo)
    {
      TREE_USED (binfo) = 0;
      if (BINFO_BASETYPES (binfo))
        BINFO_BASETYPES (binfo) = copy_node (BINFO_BASETYPES (binfo));
      binfo = TREE_CHAIN (binfo);
    }
  return rval;
}

/* Return the binfo value for ELEM in TYPE.  */

tree
binfo_value (elem, type)
     tree elem;
     tree type;
{
  if (get_base_distance (elem, type, 0, (tree *)0) == -2)
    compiler_error ("base class `%s' ambiguous in binfo_value",
                    TYPE_NAME_STRING (elem));
  if (elem == type)
    return TYPE_BINFO (type);
  if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
    return type;
  return get_binfo (elem, type, 0);
}

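/* Reverse the BINFO_INHERITANCE_CHAIN links of PATH in place and
   return the new head of the path.  */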
tree
reverse_path (path)
     tree path;
{
  register tree prev = 0, tmp, next;
  for (tmp = path; tmp; tmp = next)
    {
      next = BINFO_INHERITANCE_CHAIN (tmp);
      BINFO_INHERITANCE_CHAIN (tmp) = prev;
      prev = tmp;
    }
  return prev;
}

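/* Return the binfo for ELEM in the list of virtual base binfos LIST,
   looking first for a direct match and then within the basetypes of
   each element; abort if two matches disagree about the offset.  */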
tree
virtual_member (elem, list)
     tree elem;
     tree list;
{
  tree t;
  tree rval, nval;

  for (t = list; t; t = TREE_CHAIN (t))
    if (elem == BINFO_TYPE (t))
      return t;
  rval = 0;
  for (t = list; t; t = TREE_CHAIN (t))
    {
      tree binfos = BINFO_BASETYPES (t);
      int i;

      if (binfos != NULL_TREE)
        for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
          {
            nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
            if (nval)
              {
                if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
                  my_friendly_abort (104);
                rval = nval;
              }
          }
    }
  return rval;
}

/* Return the offset (as an INTEGER_CST) for ELEM in LIST.
   INITIAL_OFFSET is the value to add to the offset that ELEM's
   binfo entry in LIST provides.

   Returns NULL if ELEM does not have a binfo value in LIST.  */

tree
virtual_offset (elem, list, initial_offset)
     tree elem;
     tree list;
     tree initial_offset;
{
  tree vb, offset;
  tree rval, nval;

  for (vb = list; vb; vb = TREE_CHAIN (vb))
    if (elem == BINFO_TYPE (vb))
      return size_binop (PLUS_EXPR, initial_offset, BINFO_OFFSET (vb));
  rval = 0;
  for (vb = list; vb; vb = TREE_CHAIN (vb))
    {
      tree binfos = BINFO_BASETYPES (vb);
      int i;

      if (binfos == NULL_TREE)
        continue;

      for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
        {
          nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
          if (nval)
            {
              if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
                my_friendly_abort (105);
              offset = BINFO_OFFSET (vb);
              rval = nval;
            }
        }
    }
  if (rval == NULL_TREE)
    return rval;
  return size_binop (PLUS_EXPR, offset, BINFO_OFFSET (rval));
}

void
debug_binfo (elem)
     tree elem;
{
  int i;
  tree virtuals;

  fprintf (stderr, "type \"%s\"; offset = %d\n",
           TYPE_NAME_STRING (BINFO_TYPE (elem)),
           TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
  fprintf (stderr, "vtable type:\n");
  debug_tree (BINFO_TYPE (elem));
  if (BINFO_VTABLE (elem))
    fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
  else
    fprintf (stderr, "no vtable decl yet\n");
  fprintf (stderr, "virtuals:\n");
  virtuals = BINFO_VIRTUALS (elem);
  if (virtuals != 0)
    {
      virtuals = TREE_CHAIN (virtuals);
      if (flag_dossier)
        virtuals = TREE_CHAIN (virtuals);
    }
  i = 1;
  while (virtuals)
    {
      tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
      fprintf (stderr, "%s [%d =? %d]\n",
               IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
               i, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
      virtuals = TREE_CHAIN (virtuals);
      i += 1;
    }
}

/* Return the length of a chain of nodes chained through DECL_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
decl_list_length (t)
     tree t;
{
  register tree tail;
  register int len = 0;

  my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
                      || TREE_CODE (t) == TEMPLATE_DECL, 300);
  for (tail = t; tail; tail = DECL_CHAIN (tail))
    len++;

  return len;
}

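/* Return the number of functions named by T, which is either a single
   FUNCTION_DECL or a TREE_LIST of overloaded functions.  */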
int
count_functions (t)
     tree t;
{
  if (TREE_CODE (t) == FUNCTION_DECL)
    return 1;

  return decl_list_length (TREE_VALUE (t));
}

/* Like value_member, but for DECL_CHAINs.  */
tree
decl_value_member (elem, list)
     tree elem, list;
{
  while (list)
    {
      if (elem == list)
        return list;
      list = DECL_CHAIN (list);
    }
  return NULL_TREE;
}

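/* Return nonzero if X names one or more functions: either a FUNCTION_DECL
   or a TREE_LIST whose value is a FUNCTION_DECL or TEMPLATE_DECL.  */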
int
is_overloaded_fn (x)
     tree x;
{
  if (TREE_CODE (x) == FUNCTION_DECL)
    return 1;

  if (TREE_CODE (x) == TREE_LIST
      && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
          || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
    return 1;

  return 0;
}

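/* Like is_overloaded_fn, but return nonzero only when X is a TREE_LIST of
   functions, not a lone FUNCTION_DECL.  */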
int
really_overloaded_fn (x)
     tree x;
{
  if (TREE_CODE (x) == TREE_LIST
      && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
          || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
    return 1;

  return 0;
}

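/* Return the first function named by FROM, which is either a FUNCTION_DECL
   itself or a TREE_LIST of functions.  */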
tree
get_first_fn (from)
     tree from;
{
  if (TREE_CODE (from) == FUNCTION_DECL)
    return from;

  my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);

  return TREE_VALUE (from);
}

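/* Return the function address stored in vtable entry ENTRY.  With
   -fvtable-thunks, ENTRY is the address itself; if it is the address of a
   THUNK_DECL, return that decl's DECL_INITIAL instead.  Without thunks,
   the address is the third element of the entry's CONSTRUCTOR.  */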
tree
fnaddr_from_vtable_entry (entry)
     tree entry;
{
  if (flag_vtable_thunks)
    {
      tree func = entry;
      if (TREE_CODE (func) == ADDR_EXPR)
        func = TREE_OPERAND (func, 0);
      if (TREE_CODE (func) == THUNK_DECL)
        return DECL_INITIAL (func);
      else
        return entry;
    }
  else
    return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
}

void
set_fnaddr_from_vtable_entry (entry, value)
     tree entry, value;
{
  if (flag_vtable_thunks)
    abort ();
  else
    TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value;
}

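/* Return the TREE_CHAIN of the TYPE_ARG_TYPES of T's type, i.e. its
   argument types after the first one.  */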
tree
function_arg_chain (t)
     tree t;
{
  return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
}

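/* Return nonzero if T denotes an aggregate type, looking through one level
   of T's type when TREE_CODE (T) is CODE.  */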
int
promotes_to_aggr_type (t, code)
     tree t;
     enum tree_code code;
{
  if (TREE_CODE (t) == code)
    t = TREE_TYPE (t);
  return IS_AGGR_TYPE (t);
}

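/* Return nonzero if T1 and T2 are nodes with the same tree code and both
   denote aggregate types.  */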
int
is_aggr_type_2 (t1, t2)
     tree t1, t2;
{
  if (TREE_CODE (t1) != TREE_CODE (t2))
    return 0;
  return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
}

/* Give message using types TYPE1 and TYPE2 as arguments.
   PFN is the function which will print the message;
   S is the format string for PFN to use.  */
void
message_2_types (pfn, s, type1, type2)
     void (*pfn) ();
     char *s;
     tree type1, type2;
{
  tree name1 = TYPE_NAME (type1);
  tree name2 = TYPE_NAME (type2);
  if (TREE_CODE (name1) == TYPE_DECL)
    name1 = DECL_NAME (name1);
  if (TREE_CODE (name2) == TYPE_DECL)
    name2 = DECL_NAME (name2);
  (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
}
\f
#define PRINT_RING_SIZE 4

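/* Return a printable name for DECL.  Results for function decls are
   cached in a small ring of the PRINT_RING_SIZE most recently printed
   names, to avoid repeated calls to decl_as_string.  */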
char *
lang_printable_name (decl)
     tree decl;
{
  static tree decl_ring[PRINT_RING_SIZE];
  static char *print_ring[PRINT_RING_SIZE];
  static int ring_counter;
  int i;

  /* Only cache functions.  */
  if (TREE_CODE (decl) != FUNCTION_DECL
      || DECL_LANG_SPECIFIC (decl) == 0)
    return decl_as_string (decl, 1);

  /* See if this print name is lying around.  */
  for (i = 0; i < PRINT_RING_SIZE; i++)
    if (decl_ring[i] == decl)
      /* yes, so return it.  */
      return print_ring[i];

  if (++ring_counter == PRINT_RING_SIZE)
    ring_counter = 0;

  if (current_function_decl != NULL_TREE)
    {
      if (decl_ring[ring_counter] == current_function_decl)
        ring_counter += 1;
      if (ring_counter == PRINT_RING_SIZE)
        ring_counter = 0;
      if (decl_ring[ring_counter] == current_function_decl)
        my_friendly_abort (106);
    }

  if (print_ring[ring_counter])
    free (print_ring[ring_counter]);

  {
    int print_ret_type_p
      = (!DECL_CONSTRUCTOR_P (decl)
         && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));

    char *name = (char *)decl_as_string (decl, print_ret_type_p);
    print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
    strcpy (print_ring[ring_counter], name);
    decl_ring[ring_counter] = decl;
  }
  return print_ring[ring_counter];
}
\f
/* Comparison function for sorting identifiers in RAISES lists.
   Note that because IDENTIFIER_NODEs are unique, we can sort
   them by address, saving an indirection.  */
static int
id_cmp (p1, p2)
     tree *p1, *p2;
{
  return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2);
}

/* Build the FUNCTION_TYPE or METHOD_TYPE which may raise exceptions
   listed in RAISES.  */
tree
build_exception_variant (ctype, type, raises)
     tree ctype, type;
     tree raises;
{
  int i;
  tree v = TYPE_MAIN_VARIANT (type);
  tree t, t2, cname;
  tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
  int constp = TYPE_READONLY (type);
  int volatilep = TYPE_VOLATILE (type);

  for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
    {
      if (TYPE_READONLY (v) != constp
          || TYPE_VOLATILE (v) != volatilep)
        continue;

      t = raises;
      t2 = TYPE_RAISES_EXCEPTIONS (v);
      while (t && t2)
        {
          if (TREE_TYPE (t) == TREE_TYPE (t2))
            {
              t = TREE_CHAIN (t);
              t2 = TREE_CHAIN (t2);
            }
          else break;
        }
      if (t || t2)
        continue;
      /* List of exceptions raised matches previously found list.

         @@ Nice to free up storage used in consing up the
         @@ list of exceptions raised.  */
      return v;
    }

  /* Need to build a new variant.  */
  v = copy_node (type);
  TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
  TYPE_NEXT_VARIANT (type) = v;
  if (raises && ! TREE_PERMANENT (raises))
    {
      push_obstacks_nochange ();
      end_temporary_allocation ();
      raises = copy_list (raises);
      pop_obstacks ();
    }
  TYPE_RAISES_EXCEPTIONS (v) = raises;
  return v;
}

/* Subroutine of copy_to_permanent

   Assuming T is a node built bottom-up, make it all exist on
   permanent obstack, if it is not permanent already.  */
static tree
make_deep_copy (t)
     tree t;
{
  enum tree_code code;

  if (t == NULL_TREE || TREE_PERMANENT (t))
    return t;

  switch (code = TREE_CODE (t))
    {
    case ERROR_MARK:
      return error_mark_node;

    case VAR_DECL:
    case FUNCTION_DECL:
    case CONST_DECL:
      break;

    case PARM_DECL:
      {
        tree chain = TREE_CHAIN (t);
        t = copy_node (t);
        TREE_CHAIN (t) = make_deep_copy (chain);
        TREE_TYPE (t) = make_deep_copy (TREE_TYPE (t));
        DECL_INITIAL (t) = make_deep_copy (DECL_INITIAL (t));
        DECL_SIZE (t) = make_deep_copy (DECL_SIZE (t));
        return t;
      }

    case TREE_LIST:
      {
        tree chain = TREE_CHAIN (t);
        t = copy_node (t);
        TREE_PURPOSE (t) = make_deep_copy (TREE_PURPOSE (t));
        TREE_VALUE (t) = make_deep_copy (TREE_VALUE (t));
        TREE_CHAIN (t) = make_deep_copy (chain);
        return t;
      }

    case TREE_VEC:
      {
        int len = TREE_VEC_LENGTH (t);

        t = copy_node (t);
        while (len--)
          TREE_VEC_ELT (t, len) = make_deep_copy (TREE_VEC_ELT (t, len));
        return t;
      }

    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
      return copy_node (t);

    case COND_EXPR:
    case TARGET_EXPR:
    case NEW_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
      TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
      TREE_OPERAND (t, 2) = make_deep_copy (TREE_OPERAND (t, 2));
      return t;

    case SAVE_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
      return t;

    case MODIFY_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_ANDTC_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case COMPOUND_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case CALL_EXPR:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
      TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
      return t;

    case CONVERT_EXPR:
    case ADDR_EXPR:
    case INDIRECT_REF:
    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case NOP_EXPR:
    case COMPONENT_REF:
      t = copy_node (t);
      TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
      return t;

      /* This list is incomplete, but should suffice for now.
         It is very important that `sorry' does not call
         `report_error_function'.  That could cause an infinite loop.  */
    default:
      sorry ("initializer contains unrecognized tree code");
      return error_mark_node;

    }
  my_friendly_abort (107);
  /* NOTREACHED */
  return NULL_TREE;
}

/* Assuming T is a node built bottom-up, make it all exist on
   permanent obstack, if it is not permanent already.  */
tree
copy_to_permanent (t)
     tree t;
{
  register struct obstack *ambient_obstack = current_obstack;
  register struct obstack *ambient_saveable_obstack = saveable_obstack;

  if (t == NULL_TREE || TREE_PERMANENT (t))
    return t;

  saveable_obstack = &permanent_obstack;
  current_obstack = saveable_obstack;

  t = make_deep_copy (t);

  current_obstack = ambient_obstack;
  saveable_obstack = ambient_saveable_obstack;

  return t;
}

void
print_lang_statistics ()
{
  extern struct obstack maybepermanent_obstack;
  print_obstack_statistics ("class_obstack", &class_obstack);
  print_obstack_statistics ("permanent_obstack", &permanent_obstack);
  print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
  print_search_statistics ();
  print_class_statistics ();
}

/* This is used by the `assert' macro.  It is provided in libgcc.a,
   which `cc' doesn't know how to link.  Note that the C++ front-end
   no longer actually uses the `assert' macro (instead, it calls
   my_friendly_assert).  But all of the back-end files still need this.  */
void
__eprintf (string, expression, line, filename)
#ifdef __STDC__
     const char *string;
     const char *expression;
     unsigned line;
     const char *filename;
#else
     char *string;
     char *expression;
     unsigned line;
     char *filename;
#endif
{
  fprintf (stderr, string, expression, line, filename);
  fflush (stderr);
  abort ();
}

/* Return, as an INTEGER_CST node, the number of elements for
   TYPE (which is an ARRAY_TYPE).  This counts only elements of the top array.  */

tree
array_type_nelts_top (type)
     tree type;
{
  return fold (build (PLUS_EXPR, integer_type_node,
                      array_type_nelts (type),
                      integer_one_node));
}

/* Return, as an INTEGER_CST node, the number of elements for
   TYPE (which is an ARRAY_TYPE).  This one is a recursive count of all
   ARRAY_TYPEs that are clumped together.  */

tree
array_type_nelts_total (type)
     tree type;
{
  tree sz = array_type_nelts_top (type);
  type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree n = array_type_nelts_top (type);
      sz = fold (build (MULT_EXPR, integer_type_node, sz, n));
      type = TREE_TYPE (type);
    }
  return sz;
}