1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include <stdio.h>
24 #include "obstack.h"
25 #include "tree.h"
26 #include "cp-tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29
30 #define CEIL(x,y) (((x) + (y) - 1) / (y))
31
32 /* Return nonzero if REF is an lvalue valid for this language.
33 Lvalues can be assigned, unless they have TREE_READONLY.
34 Lvalues can have their address taken, unless they have DECL_REGISTER. */
35
36 int
37 real_lvalue_p (ref)
38 tree ref;
39 {
40 if (! language_lvalue_valid (ref))
41 return 0;
42
43 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
44 return 1;
45
46 if (ref == current_class_decl && flag_this_is_variable <= 0)
47 return 0;
48
49 switch (TREE_CODE (ref))
50 {
51 /* preincrements and predecrements are valid lvals, provided
52 what they refer to are valid lvals. */
53 case PREINCREMENT_EXPR:
54 case PREDECREMENT_EXPR:
55 case COMPONENT_REF:
56 case SAVE_EXPR:
57 return real_lvalue_p (TREE_OPERAND (ref, 0));
58
59 case STRING_CST:
60 return 1;
61
62 case VAR_DECL:
63 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
64 && DECL_LANG_SPECIFIC (ref)
65 && DECL_IN_AGGR_P (ref))
66 return 0;
67 case INDIRECT_REF:
68 case ARRAY_REF:
69 case PARM_DECL:
70 case RESULT_DECL:
71 case ERROR_MARK:
72 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
73 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
74 return 1;
75 break;
76
77 case WITH_CLEANUP_EXPR:
78 return real_lvalue_p (TREE_OPERAND (ref, 0));
79
80 /* A currently unresolved scope ref. */
81 case SCOPE_REF:
82 my_friendly_abort (103);
83 case OFFSET_REF:
84 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
85 return 1;
86 return real_lvalue_p (TREE_OPERAND (ref, 0))
87 && real_lvalue_p (TREE_OPERAND (ref, 1));
88 break;
89
90 case COND_EXPR:
91 return (real_lvalue_p (TREE_OPERAND (ref, 1))
92 && real_lvalue_p (TREE_OPERAND (ref, 2)));
93
94 case MODIFY_EXPR:
95 return 1;
96
97 case COMPOUND_EXPR:
98 return real_lvalue_p (TREE_OPERAND (ref, 1));
99
100 case MAX_EXPR:
101 case MIN_EXPR:
102 return (real_lvalue_p (TREE_OPERAND (ref, 0))
103 && real_lvalue_p (TREE_OPERAND (ref, 1)));
104 }
105
106 return 0;
107 }
108
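/* Like real_lvalue_p, but also accept "generalized" lvalues: TARGET_EXPRs
   and calls returning an aggregate (class) type, whose temporaries have an
   address even though they are rvalues in the language.  Illustrative
   examples (hypothetical declarations `int i; int *p; struct S f ();'):
   `i', `*p' and `"abc"' satisfy both predicates; `f ()' satisfies
   lvalue_p but not real_lvalue_p; `i + 1' satisfies neither.  */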
109 int
110 lvalue_p (ref)
111 tree ref;
112 {
113 if (! language_lvalue_valid (ref))
114 return 0;
115
116 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
117 return 1;
118
119 if (ref == current_class_decl && flag_this_is_variable <= 0)
120 return 0;
121
122 switch (TREE_CODE (ref))
123 {
124 /* preincrements and predecrements are valid lvals, provided
125 what they refer to are valid lvals. */
126 case PREINCREMENT_EXPR:
127 case PREDECREMENT_EXPR:
128 case COMPONENT_REF:
129 case SAVE_EXPR:
130 return lvalue_p (TREE_OPERAND (ref, 0));
131
132 case STRING_CST:
133 return 1;
134
135 case VAR_DECL:
136 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
137 && DECL_LANG_SPECIFIC (ref)
138 && DECL_IN_AGGR_P (ref))
139 return 0;
140 case INDIRECT_REF:
141 case ARRAY_REF:
142 case PARM_DECL:
143 case RESULT_DECL:
144 case ERROR_MARK:
145 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
146 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
147 return 1;
148 break;
149
150 case WITH_CLEANUP_EXPR:
151 return lvalue_p (TREE_OPERAND (ref, 0));
152
153 case TARGET_EXPR:
154 return 1;
155
156 case CALL_EXPR:
157 if (IS_AGGR_TYPE (TREE_TYPE (ref)))
158 return 1;
159 break;
160
161 /* A currently unresolved scope ref. */
162 case SCOPE_REF:
163 my_friendly_abort (103);
164 case OFFSET_REF:
165 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
166 return 1;
167 return lvalue_p (TREE_OPERAND (ref, 0))
168 && lvalue_p (TREE_OPERAND (ref, 1));
169 break;
170
171 case COND_EXPR:
172 return (lvalue_p (TREE_OPERAND (ref, 1))
173 && lvalue_p (TREE_OPERAND (ref, 2)));
174
175 case MODIFY_EXPR:
176 return 1;
177
178 case COMPOUND_EXPR:
179 return lvalue_p (TREE_OPERAND (ref, 1));
180
181 case MAX_EXPR:
182 case MIN_EXPR:
183 return (lvalue_p (TREE_OPERAND (ref, 0))
184 && lvalue_p (TREE_OPERAND (ref, 1)));
185 }
186
187 return 0;
188 }
189
190 /* Return nonzero if REF is an lvalue valid for this language;
191 otherwise, print an error message and return zero. */
192
193 int
194 lvalue_or_else (ref, string)
195 tree ref;
196 char *string;
197 {
198 int win = lvalue_p (ref);
199 if (! win)
200 error ("non-lvalue in %s", string);
201 return win;
202 }
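
/* A typical (illustrative) call is `lvalue_or_else (lhs, "assignment")';
   STRING simply names the construct for the error message, giving
   "non-lvalue in assignment" when LHS is not an lvalue.  */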
203
204 /* INIT is a CALL_EXPR which needs info about its target.
205 TYPE is the type that this initialization should appear to have.
206
207 Build an encapsulation of the initialization to perform
208 and return it so that it can be processed by language-independent
209 and language-specific expression expanders.
210
211 If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
212 Otherwise, cleanups are not built here. For example, when building
213 an initialization for a stack slot, the called function handles
214 the cleanup, so we would not want to build one here. */
215 tree
216 build_cplus_new (type, init, with_cleanup_p)
217 tree type;
218 tree init;
219 int with_cleanup_p;
220 {
221 tree slot;
222 tree rval;
223
224 slot = build (VAR_DECL, type);
225 layout_decl (slot, 0);
226 rval = build (NEW_EXPR, type,
227 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
228 TREE_SIDE_EFFECTS (rval) = 1;
229 TREE_ADDRESSABLE (rval) = 1;
230 rval = build (TARGET_EXPR, type, slot, rval, 0);
231 TREE_SIDE_EFFECTS (rval) = 1;
232 TREE_ADDRESSABLE (rval) = 1;
233
234 #if 0
235 if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
236 {
237 TREE_OPERAND (rval, 2) = error_mark_node;
238 rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
239 build_delete (TYPE_POINTER_TO (type),
240 build_unary_op (ADDR_EXPR, slot, 0),
241 integer_two_node,
242 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0));
243 TREE_SIDE_EFFECTS (rval) = 1;
244 TREE_ADDRESSABLE (rval) = 1;
245 }
246 #endif
247 return rval;
248 }
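
/* Roughly, the tree built above for a call of type TYPE is (illustrative
   sketch):

       TARGET_EXPR <SLOT, NEW_EXPR <callee, args, SLOT>>

   both of type TYPE, where SLOT is the VAR_DECL standing for the object
   being initialized; the callee and argument list are taken from INIT.  */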
249
250 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
251 these CALL_EXPRs with tree nodes that will perform the cleanups. */
252
253 tree
254 break_out_cleanups (exp)
255 tree exp;
256 {
257 tree tmp = exp;
258
259 if (TREE_CODE (tmp) == CALL_EXPR
260 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
261 return build_cplus_new (TREE_TYPE (tmp), tmp, 1);
262
263 while (TREE_CODE (tmp) == NOP_EXPR
264 || TREE_CODE (tmp) == CONVERT_EXPR
265 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
266 {
267 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
268 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
269 {
270 TREE_OPERAND (tmp, 0)
271 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
272 TREE_OPERAND (tmp, 0), 1);
273 break;
274 }
275 else
276 tmp = TREE_OPERAND (tmp, 0);
277 }
278 return exp;
279 }
280
281 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
282 copies where they are found. Returns a deep copy of all nodes transitively
283 containing CALL_EXPRs. */
284
285 tree
286 break_out_calls (exp)
287 tree exp;
288 {
289 register tree t1, t2;
290 register enum tree_code code;
291 register int changed = 0;
292 register int i;
293
294 if (exp == NULL_TREE)
295 return exp;
296
297 code = TREE_CODE (exp);
298
299 if (code == CALL_EXPR)
300 return copy_node (exp);
301
302 /* Don't try to defeat a save_expr, as it should only be evaluated once. */
303 if (code == SAVE_EXPR)
304 return exp;
305
306 switch (TREE_CODE_CLASS (code))
307 {
308 default:
309 abort ();
310
311 case 'c': /* a constant */
312 case 't': /* a type node */
313 case 'x': /* something random, like an identifier or an ERROR_MARK. */
314 return exp;
315
316 case 'd': /* A decl node */
317 #if 0 /* This is bogus. jason 9/21/94 */
318
319 t1 = break_out_calls (DECL_INITIAL (exp));
320 if (t1 != DECL_INITIAL (exp))
321 {
322 exp = copy_node (exp);
323 DECL_INITIAL (exp) = t1;
324 }
325 #endif
326 return exp;
327
328 case 'b': /* A block node */
329 {
330 /* Don't know how to handle these correctly yet. Must do a
331 break_out_calls on all DECL_INITIAL values for local variables,
332 and also break_out_calls on all sub-blocks and sub-statements. */
333 abort ();
334 }
335 return exp;
336
337 case 'e': /* an expression */
338 case 'r': /* a reference */
339 case 's': /* an expression with side effects */
340 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
341 {
342 t1 = break_out_calls (TREE_OPERAND (exp, i));
343 if (t1 != TREE_OPERAND (exp, i))
344 {
345 exp = copy_node (exp);
346 TREE_OPERAND (exp, i) = t1;
347 }
348 }
349 return exp;
350
351 case '<': /* a comparison expression */
352 case '2': /* a binary arithmetic expression */
353 t2 = break_out_calls (TREE_OPERAND (exp, 1));
354 if (t2 != TREE_OPERAND (exp, 1))
355 changed = 1;
356 case '1': /* a unary arithmetic expression */
357 t1 = break_out_calls (TREE_OPERAND (exp, 0));
358 if (t1 != TREE_OPERAND (exp, 0))
359 changed = 1;
360 if (changed)
361 {
362 if (tree_code_length[(int) code] == 1)
363 return build1 (code, TREE_TYPE (exp), t1);
364 else
365 return build (code, TREE_TYPE (exp), t1, t2);
366 }
367 return exp;
368 }
369
370 }
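
/* For example (illustrative): applied to `a + f (b)', break_out_calls
   leaves `a' and `b' shared but returns a fresh PLUS_EXPR whose second
   operand is a fresh copy of the CALL_EXPR node, since every node on a
   path down to a CALL_EXPR must be copied.  */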
371 \f
372 extern struct obstack *current_obstack;
373 extern struct obstack permanent_obstack, class_obstack;
374 extern struct obstack *saveable_obstack;
375
376 /* Here is how primitive or already-canonicalized types' hash
377 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
378 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
379
380 /* Construct, lay out and return the type of methods belonging to class
381 BASETYPE, whose arguments are described by ARGTYPES and whose values
382 are described by RETTYPE. If such a type exists already, reuse it. */
383 tree
384 build_cplus_method_type (basetype, rettype, argtypes)
385 tree basetype, rettype, argtypes;
386 {
387 register tree t;
388 tree ptype;
389 int hashcode;
390
391 /* Make a node of the sort we want. */
392 t = make_node (METHOD_TYPE);
393
394 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
395 TREE_TYPE (t) = rettype;
396 if (IS_SIGNATURE (basetype))
397 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
398 TYPE_READONLY (basetype),
399 TYPE_VOLATILE (basetype));
400 else
401 ptype = build_pointer_type (basetype);
402
403 /* The actual arglist for this function includes a "hidden" argument
404 which is "this". Put it into the list of argument types. */
405
406 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
407 TYPE_ARG_TYPES (t) = argtypes;
408 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
409
410 /* If we already have such a type, use the old one and free this one.
411 Note that it also frees up the above cons cell if found. */
412 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
413 t = type_hash_canon (hashcode, t);
414
415 if (TYPE_SIZE (t) == 0)
416 layout_type (t);
417
418 return t;
419 }
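
/* For instance (illustrative), for a member function `int A::f (char)',
   BASETYPE is `A', RETTYPE is `int' and ARGTYPES is `(char)'; the
   resulting METHOD_TYPE gets argument types `(A *, char)' because the
   implicit `this' argument is prepended (a signature pointer is used
   instead when the class is a signature).  */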
420
421 tree
422 build_cplus_staticfn_type (basetype, rettype, argtypes)
423 tree basetype, rettype, argtypes;
424 {
425 register tree t;
426 int hashcode;
427
428 /* Make a node of the sort we want. */
429 t = make_node (FUNCTION_TYPE);
430
431 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
432 TREE_TYPE (t) = rettype;
433
434 TYPE_ARG_TYPES (t) = argtypes;
435
436 /* If we already have such a type, use the old one and free this one. */
438 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
439 t = type_hash_canon (hashcode, t);
440
441 if (TYPE_SIZE (t) == 0)
442 layout_type (t);
443
444 return t;
445 }
446
447 tree
448 build_cplus_array_type (elt_type, index_type)
449 tree elt_type;
450 tree index_type;
451 {
452 register struct obstack *ambient_obstack = current_obstack;
453 register struct obstack *ambient_saveable_obstack = saveable_obstack;
454 tree t;
455
456 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
457 make this permanent too. */
458 if (TREE_PERMANENT (elt_type)
459 && (index_type == 0 || TREE_PERMANENT (index_type)))
460 {
461 current_obstack = &permanent_obstack;
462 saveable_obstack = &permanent_obstack;
463 }
464
465 t = build_array_type (elt_type, index_type);
466
467 /* Push these needs up so that initialization takes place
468 more easily. */
469 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
470 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
471 current_obstack = ambient_obstack;
472 saveable_obstack = ambient_saveable_obstack;
473 return t;
474 }
475 \f
476 /* Make a variant type in the proper way for C/C++, propagating qualifiers
477 down to the element type of an array. */
478
479 tree
480 cp_build_type_variant (type, constp, volatilep)
481 tree type;
482 int constp, volatilep;
483 {
484 if (TREE_CODE (type) == ARRAY_TYPE)
485 {
486 tree real_main_variant = TYPE_MAIN_VARIANT (type);
487
488 push_obstacks (TYPE_OBSTACK (real_main_variant),
489 TYPE_OBSTACK (real_main_variant));
490 type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
491 constp, volatilep),
492 TYPE_DOMAIN (type));
493
494 /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
495 make a copy. (TYPE might have come from the hash table and
496 REAL_MAIN_VARIANT might be in some function's obstack.) */
497
498 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
499 {
500 type = copy_node (type);
501 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
502 }
503
504 TYPE_MAIN_VARIANT (type) = real_main_variant;
505 pop_obstacks ();
506 }
507 return build_type_variant (type, constp, volatilep);
508 }
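
/* For example (illustrative): applying `const' to the array type
   `int[10]' produces an array of `const int'; the qualifier is pushed
   down to the element type instead of being applied only to the
   ARRAY_TYPE node itself.  */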
509 \f
510 /* Add OFFSET to all base types of BINFO.
511
512 OFFSET, which is a type offset, is a number of bytes.
513
514 Note that we don't have to worry about having two paths to the
515 same base type, since this type owns its association list. */
516 void
517 propagate_binfo_offsets (binfo, offset)
518 tree binfo;
519 tree offset;
520 {
521 tree binfos = BINFO_BASETYPES (binfo);
522 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
523
524 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
525 {
526 tree base_binfo = TREE_VEC_ELT (binfos, i);
527
528 if (TREE_VIA_VIRTUAL (base_binfo))
529 i += 1;
530 else
531 {
532 int j;
533 tree base_binfos = BINFO_BASETYPES (base_binfo);
534 tree delta;
535
536 for (j = i+1; j < n_baselinks; j++)
537 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
538 {
539 /* The next basetype offset must take into account the space
540 between the classes, not just the size of each class. */
541 delta = size_binop (MINUS_EXPR,
542 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
543 BINFO_OFFSET (base_binfo));
544 break;
545 }
546
547 #if 0
548 if (BINFO_OFFSET_ZEROP (base_binfo))
549 BINFO_OFFSET (base_binfo) = offset;
550 else
551 BINFO_OFFSET (base_binfo)
552 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
553 #else
554 BINFO_OFFSET (base_binfo) = offset;
555 #endif
556 if (base_binfos)
557 {
558 int k;
559 tree chain = NULL_TREE;
560
561 /* Now unshare the structure beneath BASE_BINFO. */
562 for (k = TREE_VEC_LENGTH (base_binfos)-1;
563 k >= 0; k--)
564 {
565 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
566 if (! TREE_VIA_VIRTUAL (base_base_binfo))
567 TREE_VEC_ELT (base_binfos, k)
568 = make_binfo (BINFO_OFFSET (base_base_binfo),
569 base_base_binfo,
570 BINFO_VTABLE (base_base_binfo),
571 BINFO_VIRTUALS (base_base_binfo),
572 chain);
573 chain = TREE_VEC_ELT (base_binfos, k);
574 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
575 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
576 BINFO_INHERITANCE_CHAIN (chain) = base_binfo;
577 }
578 /* Now propagate the offset to the base types. */
579 propagate_binfo_offsets (base_binfo, offset);
580 }
581
582 /* Go to our next class that counts for offset propagation. */
583 i = j;
584 if (i < n_baselinks)
585 offset = size_binop (PLUS_EXPR, offset, delta);
586 }
587 }
588 }
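
/* Illustrative example: if BINFO sits at byte offset 8 in the complete
   object and has two nonvirtual bases previously laid out 0 and 4 bytes
   into it, this pass records their offsets as 8 and 12; DELTA supplies
   the 4-byte spacing between the consecutive bases.  */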
589
590 /* Compute the actual offsets that our virtual base classes
591 will have *for this type*. This must be performed after
592 the fields are laid out, since virtual baseclasses must be
593 laid out at the end of the record.
594
595 Returns the maximum number of virtual functions any of the virtual
596 baseclasses provide. */
597 int
598 layout_vbasetypes (rec, max)
599 tree rec;
600 int max;
601 {
602 /* Get all the virtual base types that this type uses.
603 The TREE_VALUE slot holds the virtual baseclass type. */
604 tree vbase_types = get_vbase_types (rec);
605
606 #ifdef STRUCTURE_SIZE_BOUNDARY
607 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
608 #else
609 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
610 #endif
611 int desired_align;
612
613 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
614 where CONST_SIZE is an integer
615 and VAR_SIZE is a tree expression.
616 If VAR_SIZE is null, the size is just CONST_SIZE.
617 Naturally we try to avoid using VAR_SIZE. */
618 register unsigned const_size = 0;
619 register tree var_size = 0;
620 int nonvirtual_const_size;
621 tree nonvirtual_var_size;
622
623 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
624
625 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
626 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
627 else
628 var_size = TYPE_SIZE (rec);
629
630 nonvirtual_const_size = const_size;
631 nonvirtual_var_size = var_size;
632
633 while (vbase_types)
634 {
635 tree basetype = BINFO_TYPE (vbase_types);
636 tree offset;
637
638 desired_align = TYPE_ALIGN (basetype);
639 record_align = MAX (record_align, desired_align);
640
641 if (const_size == 0)
642 offset = integer_zero_node;
643 else
644 {
645 /* Give each virtual base type the alignment it wants. */
646 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
647 * TYPE_ALIGN (basetype);
648 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
649 }
650
651 if (CLASSTYPE_VSIZE (basetype) > max)
652 max = CLASSTYPE_VSIZE (basetype);
653 BINFO_OFFSET (vbase_types) = offset;
654
655 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
656 {
657 /* Every virtual baseclass takes at least a UNIT, so that we can
658 take its address and get something different for each base. */
659 const_size += MAX (BITS_PER_UNIT,
660 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
661 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
662 }
663 else if (var_size == 0)
664 var_size = TYPE_SIZE (basetype);
665 else
666 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
667
668 vbase_types = TREE_CHAIN (vbase_types);
669 }
670
671 if (const_size)
672 {
673 /* Because a virtual base might take a single byte above,
674 we have to re-adjust the total size to make sure it is
675 a multiple of the alignment. */
676 /* Give the whole object the alignment it wants. */
677 const_size = CEIL (const_size, record_align) * record_align;
678 }
679
680 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
681 here, as that is for this class, without any virtual base classes. */
682 TYPE_ALIGN (rec) = record_align;
683 if (const_size != nonvirtual_const_size)
684 {
685 CLASSTYPE_VBASE_SIZE (rec)
686 = size_int (const_size - nonvirtual_const_size);
687 TYPE_SIZE (rec) = size_int (const_size);
688 }
689
690 /* Now propagate offset information throughout the lattice
691 under the vbase type. */
692 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
693 vbase_types = TREE_CHAIN (vbase_types))
694 {
695 tree base_binfos = BINFO_BASETYPES (vbase_types);
696
697 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
698
699 if (base_binfos)
700 {
701 tree chain = NULL_TREE;
702 int j;
703 /* Now unshare the structure beneath BASE_BINFO. */
704
705 for (j = TREE_VEC_LENGTH (base_binfos)-1;
706 j >= 0; j--)
707 {
708 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
709 if (! TREE_VIA_VIRTUAL (base_base_binfo))
710 TREE_VEC_ELT (base_binfos, j)
711 = make_binfo (BINFO_OFFSET (base_base_binfo),
712 base_base_binfo,
713 BINFO_VTABLE (base_base_binfo),
714 BINFO_VIRTUALS (base_base_binfo),
715 chain);
716 chain = TREE_VEC_ELT (base_binfos, j);
717 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
718 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
719 BINFO_INHERITANCE_CHAIN (chain) = vbase_types;
720 }
721
722 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
723 }
724 }
725
726 return max;
727 }
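
/* Illustrative example: for `struct D : virtual B { ... }' whose
   nonvirtual part occupies 8 bytes, B is placed at (suitably aligned)
   byte offset 8, the record grows by at least the part of B not shared
   with B's own virtual bases, and CLASSTYPE_VBASE_SIZE (D) records the
   amount added for virtual bases.  */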
728
729 /* Lay out the base types of a record type, REC.
730 Tentatively set the size and alignment of REC
731 according to the base types alone.
732
733 Offsets for immediate nonvirtual baseclasses are also computed here.
734
735 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
736 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
737
738 Returns list of virtual base classes in a FIELD_DECL chain. */
739 tree
740 layout_basetypes (rec, binfos)
741 tree rec, binfos;
742 {
743 /* Chain to hold all the new FIELD_DECLs which point at virtual
744 base classes. */
745 tree vbase_decls = NULL_TREE;
746
747 #ifdef STRUCTURE_SIZE_BOUNDARY
748 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
749 #else
750 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
751 #endif
752
753 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
754 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
755 the size is just CONST_SIZE. Naturally we try to avoid using
756 VAR_SIZE. And so far, we've been successful. */
757 #if 0
758 register tree var_size = 0;
759 #endif
760
761 register unsigned const_size = 0;
762 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
763
764 /* Handle basetypes almost like fields, but record their
765 offsets differently. */
766
767 for (i = 0; i < n_baseclasses; i++)
768 {
769 int inc, desired_align, int_vbase_size;
770 register tree base_binfo = TREE_VEC_ELT (binfos, i);
771 register tree basetype = BINFO_TYPE (base_binfo);
772 tree decl, offset;
773
774 if (TYPE_SIZE (basetype) == 0)
775 {
776 #if 0
777 /* This error is now reported in xref_tag, thus giving better
778 location information. */
779 error_with_aggr_type (base_binfo,
780 "base class `%s' has incomplete type");
781
782 TREE_VIA_PUBLIC (base_binfo) = 1;
783 TREE_VIA_PROTECTED (base_binfo) = 0;
784 TREE_VIA_VIRTUAL (base_binfo) = 0;
785
786 /* Should handle this better so that
787
788 class A;
789 class B: private A { virtual void F(); };
790
791 does not dump core when compiled. */
792 my_friendly_abort (121);
793 #endif
794 continue;
795 }
796
797 /* All basetypes are recorded in the association list of the
798 derived type. */
799
800 if (TREE_VIA_VIRTUAL (base_binfo))
801 {
802 int j;
803 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
804 + sizeof (VBASE_NAME) + 1);
805
806 /* The offset for a virtual base class is only used in computing
807 virtual function tables and for initializing virtual base
808 pointers. It is built once `get_vbase_types' is called. */
809
810 /* If this basetype can come from another vbase pointer
811 without an additional indirection, we will share
812 that pointer. If an indirection is involved, we
813 make our own pointer. */
814 for (j = 0; j < n_baseclasses; j++)
815 {
816 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
817 if (! TREE_VIA_VIRTUAL (other_base_binfo)
818 && binfo_member (basetype,
819 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
820 goto got_it;
821 }
822 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
823 decl = build_lang_decl (FIELD_DECL, get_identifier (name),
824 build_pointer_type (basetype));
825 /* If you change any of the below, take a look at all the
826 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
827 them too. */
828 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
829 DECL_VIRTUAL_P (decl) = 1;
830 DECL_FIELD_CONTEXT (decl) = rec;
831 DECL_CLASS_CONTEXT (decl) = rec;
832 DECL_FCONTEXT (decl) = basetype;
833 DECL_SAVED_INSNS (decl) = NULL_RTX;
834 DECL_FIELD_SIZE (decl) = 0;
835 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
836 TREE_CHAIN (decl) = vbase_decls;
837 BINFO_VPTR_FIELD (base_binfo) = decl;
838 vbase_decls = decl;
839
840 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
841 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
842 {
843 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
844 "destructor `%s' non-virtual");
845 warning ("in inheritance relationship `%s: virtual %s'",
846 TYPE_NAME_STRING (rec),
847 TYPE_NAME_STRING (basetype));
848 }
849 got_it:
850 /* The space this decl occupies has already been accounted for. */
851 continue;
852 }
853
854 if (const_size == 0)
855 offset = integer_zero_node;
856 else
857 {
858 /* Give each base type the alignment it wants. */
859 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
860 * TYPE_ALIGN (basetype);
861 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
862
863 #if 0
864 /* bpk: Disabled this check until someone is willing to
865 claim it as theirs and explain exactly what circumstances
866 warrant the warning. */
867 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
868 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
869 {
870 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
871 "destructor `%s' non-virtual");
872 warning ("in inheritance relationship `%s:%s %s'",
873 TYPE_NAME_STRING (rec),
874 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
875 TYPE_NAME_STRING (basetype));
876 }
877 #endif
878 }
879 BINFO_OFFSET (base_binfo) = offset;
880 if (CLASSTYPE_VSIZE (basetype))
881 {
882 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
883 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
884 }
885 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
886 TYPE_BINFO (rec) = base_binfo;
887
888 /* Add only the amount of storage not present in
889 the virtual baseclasses. */
890
891 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
892 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
893 {
894 inc = MAX (record_align,
895 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
896 - int_vbase_size));
897
898 /* Record must have at least as much alignment as any field. */
899 desired_align = TYPE_ALIGN (basetype);
900 record_align = MAX (record_align, desired_align);
901
902 const_size += inc;
903 }
904 }
905
906 if (const_size)
907 CLASSTYPE_SIZE (rec) = size_int (const_size);
908 else
909 CLASSTYPE_SIZE (rec) = integer_zero_node;
910 CLASSTYPE_ALIGN (rec) = record_align;
911
912 return vbase_decls;
913 }
914 \f
915 /* Hashing of lists so that we don't make duplicates.
916 The entry point is `list_hash_canon'. */
917
918 /* Each hash table slot is a bucket containing a chain
919 of these structures. */
920
921 struct list_hash
922 {
923 struct list_hash *next; /* Next structure in the bucket. */
924 int hashcode; /* Hash code of this list. */
925 tree list; /* The list recorded here. */
926 };
927
928 /* Now here is the hash table. When recording a list, it is added
929 to the slot whose index is the hash code mod the table size.
930 Note that the hash table is used for several kinds of lists.
931 While all these live in the same table, they are completely independent,
932 and the hash code is computed differently for each of these. */
933
934 #define TYPE_HASH_SIZE 59
935 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
936
937 /* Compute a hash code for a list (chain of TREE_LIST nodes
938 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
939 TREE_COMMON slots), by adding the hash codes of the individual entries. */
940
941 int
942 list_hash (list)
943 tree list;
944 {
945 register int hashcode = 0;
946
947 if (TREE_CHAIN (list))
948 hashcode += TYPE_HASH (TREE_CHAIN (list));
949
950 if (TREE_VALUE (list))
951 hashcode += TYPE_HASH (TREE_VALUE (list));
952 else
953 hashcode += 1007;
954 if (TREE_PURPOSE (list))
955 hashcode += TYPE_HASH (TREE_PURPOSE (list));
956 else
957 hashcode += 1009;
958 return hashcode;
959 }
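
/* For instance (illustrative): a TREE_LIST node whose TREE_VALUE is some
   type T and whose TREE_PURPOSE and TREE_CHAIN are empty hashes to
   TYPE_HASH (T) + 1009.  */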
960
961 /* Look in the list hash table for a list equivalent to LIST, with hash
962 code HASHCODE. If one is found, return it. Otherwise return 0. */
963
964 tree
965 list_hash_lookup (hashcode, list)
966 int hashcode;
967 tree list;
968 {
969 register struct list_hash *h;
970 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
971 if (h->hashcode == hashcode
972 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
973 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
974 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
975 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
976 && TREE_VALUE (h->list) == TREE_VALUE (list)
977 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
978 {
979 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
980 return h->list;
981 }
982 return 0;
983 }
984
985 /* Add an entry to the list-hash-table
986 for a list LIST whose hash code is HASHCODE. */
987
988 void
989 list_hash_add (hashcode, list)
990 int hashcode;
991 tree list;
992 {
993 register struct list_hash *h;
994
995 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
996 h->hashcode = hashcode;
997 h->list = list;
998 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
999 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
1000 }
1001
1002 /* Given LIST, and HASHCODE its hash code, return the canonical
1003 object for an identical list if one already exists.
1004 Otherwise, return LIST, and record it as the canonical object
1005 if it is a permanent object.
1006
1007 To use this function, first create a list of the sort you want.
1008 Then compute its hash code from the fields of the list that
1009 make it different from other similar lists.
1010 Then call this function and use the value.
1011 This function frees the list you pass in if it is a duplicate. */
1012
1013 /* Set to 1 to debug without canonicalization. Never set by program. */
1014 static int debug_no_list_hash = 0;
1015
1016 tree
1017 list_hash_canon (hashcode, list)
1018 int hashcode;
1019 tree list;
1020 {
1021 tree t1;
1022
1023 if (debug_no_list_hash)
1024 return list;
1025
1026 t1 = list_hash_lookup (hashcode, list);
1027 if (t1 != 0)
1028 {
1029 obstack_free (&class_obstack, list);
1030 return t1;
1031 }
1032
1033 /* If this is a new list, record it for later reuse. */
1034 list_hash_add (hashcode, list);
1035
1036 return list;
1037 }
1038
1039 tree
1040 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
1041 int via_public, via_virtual, via_protected;
1042 tree purpose, value, chain;
1043 {
1044 struct obstack *ambient_obstack = current_obstack;
1045 tree t;
1046 int hashcode;
1047
1048 current_obstack = &class_obstack;
1049 t = tree_cons (purpose, value, chain);
1050 TREE_VIA_PUBLIC (t) = via_public;
1051 TREE_VIA_PROTECTED (t) = via_protected;
1052 TREE_VIA_VIRTUAL (t) = via_virtual;
1053 hashcode = list_hash (t);
1054 t = list_hash_canon (hashcode, t);
1055 current_obstack = ambient_obstack;
1056 return t;
1057 }
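
/* Illustrative use: `hash_tree_cons (1, 0, 0, NULL_TREE, base, NULL_TREE)'
   returns a canonical TREE_LIST marked public and nonvirtual holding BASE;
   a later call with identical arguments returns the very same node instead
   of allocating a new one.  */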
1058
1059 /* Constructor for hashed lists. */
1060 tree
1061 hash_tree_chain (value, chain)
1062 tree value, chain;
1063 {
1064 struct obstack *ambient_obstack = current_obstack;
1065 tree t;
1066 int hashcode;
1067
1068 current_obstack = &class_obstack;
1069 t = tree_cons (NULL_TREE, value, chain);
1070 hashcode = list_hash (t);
1071 t = list_hash_canon (hashcode, t);
1072 current_obstack = ambient_obstack;
1073 return t;
1074 }
1075
1076 /* Similar, but used for concatenating two lists. */
1077 tree
1078 hash_chainon (list1, list2)
1079 tree list1, list2;
1080 {
1081 if (list2 == 0)
1082 return list1;
1083 if (list1 == 0)
1084 return list2;
1085 if (TREE_CHAIN (list1) == NULL_TREE)
1086 return hash_tree_chain (TREE_VALUE (list1), list2);
1087 return hash_tree_chain (TREE_VALUE (list1),
1088 hash_chainon (TREE_CHAIN (list1), list2));
1089 }
1090
1091 static tree
1092 get_identifier_list (value)
1093 tree value;
1094 {
1095 tree list = IDENTIFIER_AS_LIST (value);
1096 if (list != NULL_TREE
1097 && (TREE_CODE (list) != TREE_LIST
1098 || TREE_VALUE (list) != value))
1099 list = NULL_TREE;
1100 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
1101 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
1102 && IDENTIFIER_TYPE_VALUE (value)
1103 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
1104 {
1105 tree type = IDENTIFIER_TYPE_VALUE (value);
1106
1107 if (TYPE_PTRMEMFUNC_P (type))
1108 list = NULL_TREE;
1109 else if (type == current_class_type)
1110 /* Don't mess up the constructor name. */
1111 list = tree_cons (NULL_TREE, value, NULL_TREE);
1112 else
1113 {
1114 register tree id;
1115 /* This will return the correct thing for regular types,
1116 nested types, and templates. Yay! */
1117 if (TYPE_NESTED_NAME (type))
1118 id = TYPE_NESTED_NAME (type);
1119 else
1120 id = TYPE_IDENTIFIER (type);
1121
1122 if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
1123 CLASSTYPE_ID_AS_LIST (type)
1124 = perm_tree_cons (NULL_TREE, id, NULL_TREE);
1125 list = CLASSTYPE_ID_AS_LIST (type);
1126 }
1127 }
1128 return list;
1129 }
1130
1131 tree
1132 get_decl_list (value)
1133 tree value;
1134 {
1135 tree list = NULL_TREE;
1136
1137 if (TREE_CODE (value) == IDENTIFIER_NODE)
1138 list = get_identifier_list (value);
1139 else if (TREE_CODE (value) == RECORD_TYPE
1140 && TYPE_LANG_SPECIFIC (value))
1141 list = CLASSTYPE_AS_LIST (value);
1142
1143 if (list != NULL_TREE)
1144 {
1145 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1146 return list;
1147 }
1148
1149 return build_decl_list (NULL_TREE, value);
1150 }
1151
1152 /* Look in the list hash table for a list equivalent to
1153 `build_tree_list (NULL_TREE, VALUE)'. If one is found, return it;
1154 otherwise build one, record it, and return it. */
1155
1156 tree
1157 list_hash_lookup_or_cons (value)
1158 tree value;
1159 {
1160 register int hashcode = TYPE_HASH (value);
1161 register struct list_hash *h;
1162 struct obstack *ambient_obstack;
1163 tree list = NULL_TREE;
1164
1165 if (TREE_CODE (value) == IDENTIFIER_NODE)
1166 list = get_identifier_list (value);
1167 else if (TREE_CODE (value) == TYPE_DECL
1168 && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
1169 && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
1170 list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
1171 else if (TREE_CODE (value) == RECORD_TYPE
1172 && TYPE_LANG_SPECIFIC (value))
1173 list = CLASSTYPE_AS_LIST (value);
1174
1175 if (list != NULL_TREE)
1176 {
1177 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
1178 return list;
1179 }
1180
1181 if (debug_no_list_hash)
1182 return hash_tree_chain (value, NULL_TREE);
1183
1184 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1185 if (h->hashcode == hashcode
1186 && TREE_VIA_VIRTUAL (h->list) == 0
1187 && TREE_VIA_PUBLIC (h->list) == 0
1188 && TREE_VIA_PROTECTED (h->list) == 0
1189 && TREE_PURPOSE (h->list) == 0
1190 && TREE_VALUE (h->list) == value)
1191 {
1192 my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
1193 my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
1194 return h->list;
1195 }
1196
1197 ambient_obstack = current_obstack;
1198 current_obstack = &class_obstack;
1199 list = build_tree_list (NULL_TREE, value);
1200 list_hash_add (hashcode, list);
1201 current_obstack = ambient_obstack;
1202 return list;
1203 }
1204 \f
1205 /* Build an association between TYPE and some parameters:
1206
1207 OFFSET is the offset added to `this' to convert it to a pointer
1208 of type `TYPE *'
1209
1210 BINFO is the base binfo to use, if we are deriving from one. This
1211 is necessary, as we want specialized parent binfos from base
1212 classes, so that the VTABLE_NAMEs of bases are for the most derived
1213 type, instead of the simple type.
1214
1215 VTABLE is the virtual function table with which to initialize
1216 sub-objects of type TYPE.
1217
1218 VIRTUALS are the virtual functions sitting in VTABLE.
1219
1220 CHAIN are more associations we must retain. */
1221
1222 tree
1223 make_binfo (offset, binfo, vtable, virtuals, chain)
1224 tree offset, binfo;
1225 tree vtable, virtuals;
1226 tree chain;
1227 {
1228 tree new_binfo = make_tree_vec (6);
1229 tree type;
1230
1231 if (TREE_CODE (binfo) == TREE_VEC)
1232 type = BINFO_TYPE (binfo);
1233 else
1234 {
1235 type = binfo;
1236 binfo = TYPE_BINFO (binfo);
1237 }
1238
1239 TREE_CHAIN (new_binfo) = chain;
1240 if (chain)
1241 TREE_USED (new_binfo) = TREE_USED (chain);
1242
1243 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1244 BINFO_OFFSET (new_binfo) = offset;
1245 BINFO_VTABLE (new_binfo) = vtable;
1246 BINFO_VIRTUALS (new_binfo) = virtuals;
1247 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1248
1249 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1250 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1251 return new_binfo;
1252 }
1253
1254 /* Return the binfo value for ELEM in TYPE. */
1255
1256 tree
1257 binfo_value (elem, type)
1258 tree elem;
1259 tree type;
1260 {
1261 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1262 compiler_error ("base class `%s' ambiguous in binfo_value",
1263 TYPE_NAME_STRING (elem));
1264 if (elem == type)
1265 return TYPE_BINFO (type);
1266 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1267 return type;
1268 return get_binfo (elem, type, 0);
1269 }
1270
1271 tree
1272 reverse_path (path)
1273 tree path;
1274 {
1275 register tree prev = 0, tmp, next;
1276 for (tmp = path; tmp; tmp = next)
1277 {
1278 next = BINFO_INHERITANCE_CHAIN (tmp);
1279 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1280 prev = tmp;
1281 }
1282 return prev;
1283 }
1284
1285 tree
1286 virtual_member (elem, list)
1287 tree elem;
1288 tree list;
1289 {
1290 tree t;
1291 tree rval, nval;
1292
1293 for (t = list; t; t = TREE_CHAIN (t))
1294 if (elem == BINFO_TYPE (t))
1295 return t;
1296 rval = 0;
1297 for (t = list; t; t = TREE_CHAIN (t))
1298 {
1299 tree binfos = BINFO_BASETYPES (t);
1300 int i;
1301
1302 if (binfos != NULL_TREE)
1303 for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
1304 {
1305 nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
1306 if (nval)
1307 {
1308 if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
1309 my_friendly_abort (104);
1310 rval = nval;
1311 }
1312 }
1313 }
1314 return rval;
1315 }
1316
1317 void
1318 debug_binfo (elem)
1319 tree elem;
1320 {
1321 int i;
1322 tree virtuals;
1323
1324 fprintf (stderr, "type \"%s\"; offset = %d\n",
1325 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1326 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1327 fprintf (stderr, "vtable type:\n");
1328 debug_tree (BINFO_TYPE (elem));
1329 if (BINFO_VTABLE (elem))
1330 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1331 else
1332 fprintf (stderr, "no vtable decl yet\n");
1333 fprintf (stderr, "virtuals:\n");
1334 virtuals = BINFO_VIRTUALS (elem);
1335 if (virtuals != 0)
1336 {
1337 /* skip the rtti type descriptor entry */
1338 virtuals = TREE_CHAIN (virtuals);
1339 }
1340 i = 1;
1341 while (virtuals)
1342 {
1343 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1344 fprintf (stderr, "%s [%d =? %d]\n",
1345 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1346 i, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1347 virtuals = TREE_CHAIN (virtuals);
1348 i += 1;
1349 }
1350 }
1351
1352 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1353 We expect a null pointer to mark the end of the chain.
1354 This is the Lisp primitive `length'. */
1355
1356 int
1357 decl_list_length (t)
1358 tree t;
1359 {
1360 register tree tail;
1361 register int len = 0;
1362
1363 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1364 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1365 for (tail = t; tail; tail = DECL_CHAIN (tail))
1366 len++;
1367
1368 return len;
1369 }
1370
1371 int
1372 count_functions (t)
1373 tree t;
1374 {
1375 if (TREE_CODE (t) == FUNCTION_DECL)
1376 return 1;
1377 else if (TREE_CODE (t) == TREE_LIST)
1378 return decl_list_length (TREE_VALUE (t));
1379
1380 my_friendly_abort (359);
1381 return 0;
1382 }
1383
1384 /* Like value_member, but for DECL_CHAINs. */
1385 tree
1386 decl_value_member (elem, list)
1387 tree elem, list;
1388 {
1389 while (list)
1390 {
1391 if (elem == list)
1392 return list;
1393 list = DECL_CHAIN (list);
1394 }
1395 return NULL_TREE;
1396 }
1397
1398 int
1399 is_overloaded_fn (x)
1400 tree x;
1401 {
1402 if (TREE_CODE (x) == FUNCTION_DECL)
1403 return 1;
1404
1405 if (TREE_CODE (x) == TREE_LIST
1406 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1407 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1408 return 1;
1409
1410 return 0;
1411 }
1412
1413 int
1414 really_overloaded_fn (x)
1415 tree x;
1416 {
1417 if (TREE_CODE (x) == TREE_LIST
1418 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1419 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1420 return 1;
1421
1422 return 0;
1423 }
1424
1425 tree
1426 get_first_fn (from)
1427 tree from;
1428 {
1429 if (TREE_CODE (from) == FUNCTION_DECL)
1430 return from;
1431
1432 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1433
1434 return TREE_VALUE (from);
1435 }
1436
1437 tree
1438 fnaddr_from_vtable_entry (entry)
1439 tree entry;
1440 {
1441 if (flag_vtable_thunks)
1442 {
1443 tree func = entry;
1444 if (TREE_CODE (func) == ADDR_EXPR)
1445 func = TREE_OPERAND (func, 0);
1446 if (TREE_CODE (func) == THUNK_DECL)
1447 return DECL_INITIAL (func);
1448 else
1449 return entry;
1450 }
1451 else
1452 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
1453 }
1454
1455 void
1456 set_fnaddr_from_vtable_entry (entry, value)
1457 tree entry, value;
1458 {
1459 if (flag_vtable_thunks)
1460 abort ();
1461 else
1462 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value;
1463 }
1464
1465 tree
1466 function_arg_chain (t)
1467 tree t;
1468 {
1469 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1470 }
1471
1472 int
1473 promotes_to_aggr_type (t, code)
1474 tree t;
1475 enum tree_code code;
1476 {
1477 if (TREE_CODE (t) == code)
1478 t = TREE_TYPE (t);
1479 return IS_AGGR_TYPE (t);
1480 }
1481
1482 int
1483 is_aggr_type_2 (t1, t2)
1484 tree t1, t2;
1485 {
1486 if (TREE_CODE (t1) != TREE_CODE (t2))
1487 return 0;
1488 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1489 }
1490
1491 /* Give message using types TYPE1 and TYPE2 as arguments.
1492 PFN is the function which will print the message;
1493 S is the format string for PFN to use. */
1494 void
1495 message_2_types (pfn, s, type1, type2)
1496 void (*pfn) ();
1497 char *s;
1498 tree type1, type2;
1499 {
1500 tree name1 = TYPE_NAME (type1);
1501 tree name2 = TYPE_NAME (type2);
1502 if (TREE_CODE (name1) == TYPE_DECL)
1503 name1 = DECL_NAME (name1);
1504 if (TREE_CODE (name2) == TYPE_DECL)
1505 name2 = DECL_NAME (name2);
1506 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1507 }
1508 \f
1509 #define PRINT_RING_SIZE 4
1510
1511 char *
1512 lang_printable_name (decl)
1513 tree decl;
1514 {
1515 static tree decl_ring[PRINT_RING_SIZE];
1516 static char *print_ring[PRINT_RING_SIZE];
1517 static int ring_counter;
1518 int i;
1519
1520 /* Only cache functions. */
1521 if (TREE_CODE (decl) != FUNCTION_DECL
1522 || DECL_LANG_SPECIFIC (decl) == 0)
1523 return decl_as_string (decl, 1);
1524
1525 /* See if this print name is lying around. */
1526 for (i = 0; i < PRINT_RING_SIZE; i++)
1527 if (decl_ring[i] == decl)
1528 /* yes, so return it. */
1529 return print_ring[i];
1530
1531 if (++ring_counter == PRINT_RING_SIZE)
1532 ring_counter = 0;
1533
1534 if (current_function_decl != NULL_TREE)
1535 {
1536 if (decl_ring[ring_counter] == current_function_decl)
1537 ring_counter += 1;
1538 if (ring_counter == PRINT_RING_SIZE)
1539 ring_counter = 0;
1540 if (decl_ring[ring_counter] == current_function_decl)
1541 my_friendly_abort (106);
1542 }
1543
1544 if (print_ring[ring_counter])
1545 free (print_ring[ring_counter]);
1546
1547 {
1548 int print_ret_type_p
1549 = (!DECL_CONSTRUCTOR_P (decl)
1550 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1551
1552 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1553 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1554 strcpy (print_ring[ring_counter], name);
1555 decl_ring[ring_counter] = decl;
1556 }
1557 return print_ring[ring_counter];
1558 }
1559 \f
1560 /* Comparison function for sorting identifiers in RAISES lists.
1561 Note that because IDENTIFIER_NODEs are unique, we can sort
1562 them by address, saving an indirection. */
1563 static int
1564 id_cmp (p1, p2)
1565 tree *p1, *p2;
1566 {
1567 return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2);
1568 }
1569
1570 /* Build the FUNCTION_TYPE or METHOD_TYPE which may raise exceptions
1571 listed in RAISES. */
1572 tree
1573 build_exception_variant (ctype, type, raises)
1574 tree ctype, type;
1575 tree raises;
1576 {
1577 int i;
1578 tree v = TYPE_MAIN_VARIANT (type);
1579 tree t, t2, cname;
1580 tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
1581 int constp = TYPE_READONLY (type);
1582 int volatilep = TYPE_VOLATILE (type);
1583
1584 for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
1585 {
1586 if (TYPE_READONLY (v) != constp
1587 || TYPE_VOLATILE (v) != volatilep)
1588 continue;
1589
1590 /* @@ This should do set equality, not exact match. */
1591 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
1592 /* List of exceptions raised matches previously found list.
1593
1594 @@ Nice to free up storage used in consing up the
1595 @@ list of exceptions raised. */
1596 return v;
1597 }
1598
1599 /* Need to build a new variant. */
1600 v = copy_node (type);
1601 TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
1602 TYPE_NEXT_VARIANT (type) = v;
1603 if (raises && ! TREE_PERMANENT (raises))
1604 {
1605 push_obstacks_nochange ();
1606 end_temporary_allocation ();
1607 raises = copy_list (raises);
1608 pop_obstacks ();
1609 }
1610 TYPE_RAISES_EXCEPTIONS (v) = raises;
1611 return v;
1612 }
1613
1614 /* Subroutine of copy_to_permanent
1615
1616 Assuming T is a node built bottom-up, make it all exist on
1617 permanent obstack, if it is not permanent already. */
1618
1619 tree
1620 mapcar (t, func)
1621 tree t;
1622 tree (*func)();
1623 {
1624 enum tree_code code;
1625 tree tmp;
1626
1627 if (t == NULL_TREE)
1628 return t;
1629
1630 if (tmp = func (t), tmp != NULL_TREE)
1631 return tmp;
1632
1633 switch (code = TREE_CODE (t))
1634 {
1635 case ERROR_MARK:
1636 return error_mark_node;
1637
1638 case VAR_DECL:
1639 case FUNCTION_DECL:
1640 case CONST_DECL:
1641 break;
1642
1643 case PARM_DECL:
1644 {
1645 tree chain = TREE_CHAIN (t);
1646 t = copy_node (t);
1647 TREE_CHAIN (t) = mapcar (chain, func);
1648 TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
1649 DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
1650 DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
1651 return t;
1652 }
1653
1654 case TREE_LIST:
1655 {
1656 tree chain = TREE_CHAIN (t);
1657 t = copy_node (t);
1658 TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
1659 TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
1660 TREE_CHAIN (t) = mapcar (chain, func);
1661 return t;
1662 }
1663
1664 case TREE_VEC:
1665 {
1666 int len = TREE_VEC_LENGTH (t);
1667
1668 t = copy_node (t);
1669 while (len--)
1670 TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
1671 return t;
1672 }
1673
1674 case INTEGER_CST:
1675 case REAL_CST:
1676 case STRING_CST:
1677 return copy_node (t);
1678
1679 case COND_EXPR:
1680 case TARGET_EXPR:
1681 case NEW_EXPR:
1682 t = copy_node (t);
1683 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1684 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1685 TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
1686 return t;
1687
1688 case SAVE_EXPR:
1689 t = copy_node (t);
1690 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1691 return t;
1692
1693 case MODIFY_EXPR:
1694 case PLUS_EXPR:
1695 case MINUS_EXPR:
1696 case MULT_EXPR:
1697 case TRUNC_DIV_EXPR:
1698 case TRUNC_MOD_EXPR:
1699 case MIN_EXPR:
1700 case MAX_EXPR:
1701 case LSHIFT_EXPR:
1702 case RSHIFT_EXPR:
1703 case BIT_IOR_EXPR:
1704 case BIT_XOR_EXPR:
1705 case BIT_AND_EXPR:
1706 case BIT_ANDTC_EXPR:
1707 case TRUTH_ANDIF_EXPR:
1708 case TRUTH_ORIF_EXPR:
1709 case LT_EXPR:
1710 case LE_EXPR:
1711 case GT_EXPR:
1712 case GE_EXPR:
1713 case EQ_EXPR:
1714 case NE_EXPR:
1715 case CEIL_DIV_EXPR:
1716 case FLOOR_DIV_EXPR:
1717 case ROUND_DIV_EXPR:
1718 case CEIL_MOD_EXPR:
1719 case FLOOR_MOD_EXPR:
1720 case ROUND_MOD_EXPR:
1721 case COMPOUND_EXPR:
1722 case PREDECREMENT_EXPR:
1723 case PREINCREMENT_EXPR:
1724 case POSTDECREMENT_EXPR:
1725 case POSTINCREMENT_EXPR:
1726 case CALL_EXPR:
1727 t = copy_node (t);
1728 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1729 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1730 return t;
1731
1732 case CONVERT_EXPR:
1733 case ADDR_EXPR:
1734 case INDIRECT_REF:
1735 case NEGATE_EXPR:
1736 case BIT_NOT_EXPR:
1737 case TRUTH_NOT_EXPR:
1738 case NOP_EXPR:
1739 case COMPONENT_REF:
1740 t = copy_node (t);
1741 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1742 return t;
1743
1744 case POINTER_TYPE:
1745 return build_pointer_type (mapcar (TREE_TYPE (t), func));
1746 case REFERENCE_TYPE:
1747 return build_reference_type (mapcar (TREE_TYPE (t), func));
1748 case FUNCTION_TYPE:
1749 return build_function_type (mapcar (TREE_TYPE (t), func),
1750 mapcar (TYPE_ARG_TYPES (t), func));
1751 case ARRAY_TYPE:
1752 return build_array_type (mapcar (TREE_TYPE (t), func),
1753 mapcar (TYPE_DOMAIN (t), func));
1754 case INTEGER_TYPE:
1755 return build_index_type (mapcar (TYPE_MAX_VALUE (t), func));
1756
1757 case OFFSET_TYPE:
1758 return build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
1759 mapcar (TREE_TYPE (t), func));
1760 case METHOD_TYPE:
1761 return build_method_type
1762 (mapcar (TYPE_METHOD_BASETYPE (t), func),
1763 build_function_type
1764 (mapcar (TREE_TYPE (t), func),
1765 mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func)));
1766
1767 case RECORD_TYPE:
1768 if (TYPE_PTRMEMFUNC_P (t))
1769 return build_ptrmemfunc_type
1770 (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
1771 /* else fall through */
1772
1773 /* This list is incomplete, but should suffice for now.
1774 It is very important that `sorry' does not call
1775 `report_error_function'. That could cause an infinite loop. */
1776 default:
1777 sorry ("initializer contains unrecognized tree code");
1778 return error_mark_node;
1779
1780 }
1781 my_friendly_abort (107);
1782 /* NOTREACHED */
1783 return NULL_TREE;
1784 }
1785
1786 static tree
1787 perm_manip (t)
1788 tree t;
1789 {
1790 if (TREE_PERMANENT (t))
1791 return t;
1792 return NULL_TREE;
1793 }
1794
1795 /* Assuming T is a node built bottom-up, make it all exist on
1796 permanent obstack, if it is not permanent already. */
1797 tree
1798 copy_to_permanent (t)
1799 tree t;
1800 {
1801 register struct obstack *ambient_obstack = current_obstack;
1802 register struct obstack *ambient_saveable_obstack = saveable_obstack;
1803 int resume;
1804
1805 if (t == NULL_TREE || TREE_PERMANENT (t))
1806 return t;
1807
1808 saveable_obstack = &permanent_obstack;
1809 current_obstack = saveable_obstack;
1810 resume = suspend_momentary ();
1811
1812 t = mapcar (t, perm_manip);
1813
1814 resume_momentary (resume);
1815 current_obstack = ambient_obstack;
1816 saveable_obstack = ambient_saveable_obstack;
1817
1818 return t;
1819 }
1820
1821 void
1822 print_lang_statistics ()
1823 {
1824 extern struct obstack maybepermanent_obstack;
1825 print_obstack_statistics ("class_obstack", &class_obstack);
1826 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1827 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1828 print_search_statistics ();
1829 print_class_statistics ();
1830 }
1831
1832 /* This is used by the `assert' macro. It is provided in libgcc.a,
1833 which `cc' doesn't know how to link. Note that the C++ front-end
1834 no longer actually uses the `assert' macro (instead, it calls
1835 my_friendly_assert). But all of the back-end files still need this. */
1836 void
1837 __eprintf (string, expression, line, filename)
1838 #ifdef __STDC__
1839 const char *string;
1840 const char *expression;
1841 unsigned line;
1842 const char *filename;
1843 #else
1844 char *string;
1845 char *expression;
1846 unsigned line;
1847 char *filename;
1848 #endif
1849 {
1850 fprintf (stderr, string, expression, line, filename);
1851 fflush (stderr);
1852 abort ();
1853 }
1854
1855 /* Return, as an INTEGER_CST node, the number of elements for
1856 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1857
1858 tree
1859 array_type_nelts_top (type)
1860 tree type;
1861 {
1862 return fold (build (PLUS_EXPR, sizetype,
1863 array_type_nelts (type),
1864 integer_one_node));
1865 }
1866
1867 /* Return, as an INTEGER_CST node, the number of elements for
1868 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1869 ARRAY_TYPEs that are clumped together. */
1870
1871 tree
1872 array_type_nelts_total (type)
1873 tree type;
1874 {
1875 tree sz = array_type_nelts_top (type);
1876 type = TREE_TYPE (type);
1877 while (TREE_CODE (type) == ARRAY_TYPE)
1878 {
1879 tree n = array_type_nelts_top (type);
1880 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1881 type = TREE_TYPE (type);
1882 }
1883 return sz;
1884 }
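
/* Worked example (illustrative): for `int a[10]', array_type_nelts yields
   9, so array_type_nelts_top yields 10; for `int a[3][4]',
   array_type_nelts_total yields 3 * 4 = 12.  */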
1885
1886 static
1887 tree
1888 bot_manip (t)
1889 tree t;
1890 {
1891 if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
1892 return t;
1893 else if (TREE_CODE (t) == TARGET_EXPR)
1894 return build_cplus_new (TREE_TYPE (t),
1895 break_out_target_exprs (TREE_OPERAND (t, 1)), 0);
1896 return NULL_TREE;
1897 }
1898
1899 /* Actually, we'll just clean out the target exprs for the moment. */
1900 tree
1901 break_out_target_exprs (t)
1902 tree t;
1903 {
1904 return mapcar (t, bot_manip);
1905 }