1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20
21 #include "config.h"
22 #include <stdio.h>
23 #include "obstack.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "flags.h"
27 #include "rtl.h"
28
29 #define CEIL(x,y) (((x) + (y) - 1) / (y))
30
31 /* Return nonzero if REF is an lvalue valid for this language.
32 Lvalues can be assigned, unless they have TREE_READONLY.
33 Lvalues can have their address taken, unless they have DECL_REGISTER. */
34
35 int
36 real_lvalue_p (ref)
37 tree ref;
38 {
39 if (! language_lvalue_valid (ref))
40 return 0;
41
42 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
43 return 1;
44
45 if (ref == current_class_decl && flag_this_is_variable <= 0)
46 return 0;
47
48 switch (TREE_CODE (ref))
49 {
50       /* Preincrements and predecrements are valid lvalues, provided
51 	 that what they refer to are valid lvalues.  */
52 case PREINCREMENT_EXPR:
53 case PREDECREMENT_EXPR:
54 case COMPONENT_REF:
55 case SAVE_EXPR:
56 return real_lvalue_p (TREE_OPERAND (ref, 0));
57
58 case STRING_CST:
59 return 1;
60
61 case VAR_DECL:
62 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
63 && DECL_LANG_SPECIFIC (ref)
64 && DECL_IN_AGGR_P (ref))
65 return 0;
66 case INDIRECT_REF:
67 case ARRAY_REF:
68 case PARM_DECL:
69 case RESULT_DECL:
70 case ERROR_MARK:
71 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
72 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
73 return 1;
74 break;
75
76 case WITH_CLEANUP_EXPR:
77 return real_lvalue_p (TREE_OPERAND (ref, 0));
78
79 /* A currently unresolved scope ref. */
80 case SCOPE_REF:
81 my_friendly_abort (103);
82 case OFFSET_REF:
83 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
84 return 1;
85 return real_lvalue_p (TREE_OPERAND (ref, 0))
86 && real_lvalue_p (TREE_OPERAND (ref, 1));
87 break;
88
89 case COND_EXPR:
90 return (real_lvalue_p (TREE_OPERAND (ref, 1))
91 && real_lvalue_p (TREE_OPERAND (ref, 2)));
92
93 case MODIFY_EXPR:
94 return 1;
95
96 case COMPOUND_EXPR:
97 return real_lvalue_p (TREE_OPERAND (ref, 1));
98
99 case MAX_EXPR:
100 case MIN_EXPR:
101 return (real_lvalue_p (TREE_OPERAND (ref, 0))
102 && real_lvalue_p (TREE_OPERAND (ref, 1)));
103 }
104
105 return 0;
106 }
107
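/* Nonzero if REF is an lvalue in the looser sense used by the front end:
   like real_lvalue_p, except that TARGET_EXPRs and CALL_EXPRs whose type
   is an aggregate also count as lvalues.  */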
108 int
109 lvalue_p (ref)
110 tree ref;
111 {
112 if (! language_lvalue_valid (ref))
113 return 0;
114
115 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
116 return 1;
117
118 if (ref == current_class_decl && flag_this_is_variable <= 0)
119 return 0;
120
121 switch (TREE_CODE (ref))
122 {
123       /* Preincrements and predecrements are valid lvalues, provided
124 	 that what they refer to are valid lvalues.  */
125 case PREINCREMENT_EXPR:
126 case PREDECREMENT_EXPR:
127 case COMPONENT_REF:
128 case SAVE_EXPR:
129 return lvalue_p (TREE_OPERAND (ref, 0));
130
131 case STRING_CST:
132 return 1;
133
134 case VAR_DECL:
135 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
136 && DECL_LANG_SPECIFIC (ref)
137 && DECL_IN_AGGR_P (ref))
138 return 0;
139 case INDIRECT_REF:
140 case ARRAY_REF:
141 case PARM_DECL:
142 case RESULT_DECL:
143 case ERROR_MARK:
144 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
145 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
146 return 1;
147 break;
148
149 case WITH_CLEANUP_EXPR:
150 return lvalue_p (TREE_OPERAND (ref, 0));
151
152 case TARGET_EXPR:
153 return 1;
154
155 case CALL_EXPR:
156 if (IS_AGGR_TYPE (TREE_TYPE (ref)))
157 return 1;
158 break;
159
160 /* A currently unresolved scope ref. */
161 case SCOPE_REF:
162 my_friendly_abort (103);
163 case OFFSET_REF:
164 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
165 return 1;
166 return lvalue_p (TREE_OPERAND (ref, 0))
167 && lvalue_p (TREE_OPERAND (ref, 1));
168 break;
169
170 case COND_EXPR:
171 return (lvalue_p (TREE_OPERAND (ref, 1))
172 && lvalue_p (TREE_OPERAND (ref, 2)));
173
174 case MODIFY_EXPR:
175 return 1;
176
177 case COMPOUND_EXPR:
178 return lvalue_p (TREE_OPERAND (ref, 1));
179
180 case MAX_EXPR:
181 case MIN_EXPR:
182 return (lvalue_p (TREE_OPERAND (ref, 0))
183 && lvalue_p (TREE_OPERAND (ref, 1)));
184 }
185
186 return 0;
187 }
188
189 /* Return nonzero if REF is an lvalue valid for this language;
190 otherwise, print an error message and return zero. */
191
192 int
193 lvalue_or_else (ref, string)
194 tree ref;
195 char *string;
196 {
197 int win = lvalue_p (ref);
198 if (! win)
199 error ("non-lvalue in %s", string);
200 return win;
201 }
202
203 /* INIT is a CALL_EXPR which needs info about its target.
204 TYPE is the type that this initialization should appear to have.
205
206 Build an encapsulation of the initialization to perform
207 and return it so that it can be processed by language-independent
208 and language-specific expression expanders.
209
210 If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
211    Otherwise, cleanups are not built here.  For example, when building
212    an initialization for a stack slot, the called function handles the
213    cleanup itself, so we would not want to build one here.  */
214 tree
215 build_cplus_new (type, init, with_cleanup_p)
216 tree type;
217 tree init;
218 int with_cleanup_p;
219 {
220 tree slot;
221 tree rval;
222
223 slot = build (VAR_DECL, type);
224 layout_decl (slot, 0);
225 rval = build (NEW_EXPR, type,
226 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
227 TREE_SIDE_EFFECTS (rval) = 1;
228 TREE_ADDRESSABLE (rval) = 1;
229 rval = build (TARGET_EXPR, type, slot, rval, 0);
230 TREE_SIDE_EFFECTS (rval) = 1;
231 TREE_ADDRESSABLE (rval) = 1;
232
233 #if 0
234 if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
235 {
236 TREE_OPERAND (rval, 2) = error_mark_node;
237 rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
238 build_delete (TYPE_POINTER_TO (type),
239 build_unary_op (ADDR_EXPR, slot, 0),
240 integer_two_node,
241 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0));
242 TREE_SIDE_EFFECTS (rval) = 1;
243 TREE_ADDRESSABLE (rval) = 1;
244 }
245 #endif
246 return rval;
247 }
248
249 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
250 these CALL_EXPRs with tree nodes that will perform the cleanups. */
251
252 tree
253 break_out_cleanups (exp)
254 tree exp;
255 {
256 tree tmp = exp;
257
258 if (TREE_CODE (tmp) == CALL_EXPR
259 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
260 return build_cplus_new (TREE_TYPE (tmp), tmp, 1);
261
262 while (TREE_CODE (tmp) == NOP_EXPR
263 || TREE_CODE (tmp) == CONVERT_EXPR
264 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
265 {
266 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
267 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
268 {
269 TREE_OPERAND (tmp, 0)
270 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
271 TREE_OPERAND (tmp, 0), 1);
272 break;
273 }
274 else
275 tmp = TREE_OPERAND (tmp, 0);
276 }
277 return exp;
278 }
279
280 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
281    copies where they are found.  Returns a deep copy of all nodes transitively
282    containing CALL_EXPRs.  */
283
284 tree
285 break_out_calls (exp)
286 tree exp;
287 {
288 register tree t1, t2;
289 register enum tree_code code;
290 register int changed = 0;
291 register int i;
292
293 if (exp == NULL_TREE)
294 return exp;
295
296 code = TREE_CODE (exp);
297
298 if (code == CALL_EXPR)
299 return copy_node (exp);
300
301 /* Don't try and defeat a save_expr, as it should only be done once. */
302 if (code == SAVE_EXPR)
303 return exp;
304
305 switch (TREE_CODE_CLASS (code))
306 {
307 default:
308 abort ();
309
310 case 'c': /* a constant */
311 case 't': /* a type node */
312 case 'x': /* something random, like an identifier or an ERROR_MARK. */
313 return exp;
314
315 case 'd': /* A decl node */
316 #if 0 /* This is bogus. jason 9/21/94 */
317
318 t1 = break_out_calls (DECL_INITIAL (exp));
319 if (t1 != DECL_INITIAL (exp))
320 {
321 exp = copy_node (exp);
322 DECL_INITIAL (exp) = t1;
323 }
324 #endif
325 return exp;
326
327 case 'b': /* A block node */
328 {
329 /* Don't know how to handle these correctly yet. Must do a
330 break_out_calls on all DECL_INITIAL values for local variables,
331 and also break_out_calls on all sub-blocks and sub-statements. */
332 abort ();
333 }
334 return exp;
335
336 case 'e': /* an expression */
337 case 'r': /* a reference */
338 case 's': /* an expression with side effects */
339 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
340 {
341 t1 = break_out_calls (TREE_OPERAND (exp, i));
342 if (t1 != TREE_OPERAND (exp, i))
343 {
344 exp = copy_node (exp);
345 TREE_OPERAND (exp, i) = t1;
346 }
347 }
348 return exp;
349
350 case '<': /* a comparison expression */
351 case '2': /* a binary arithmetic expression */
352 t2 = break_out_calls (TREE_OPERAND (exp, 1));
353 if (t2 != TREE_OPERAND (exp, 1))
354 changed = 1;
355 case '1': /* a unary arithmetic expression */
356 t1 = break_out_calls (TREE_OPERAND (exp, 0));
357 if (t1 != TREE_OPERAND (exp, 0))
358 changed = 1;
359 if (changed)
360 {
361 if (tree_code_length[(int) code] == 1)
362 return build1 (code, TREE_TYPE (exp), t1);
363 else
364 return build (code, TREE_TYPE (exp), t1, t2);
365 }
366 return exp;
367 }
368
369 }
370 \f
371 extern struct obstack *current_obstack;
372 extern struct obstack permanent_obstack, class_obstack;
373 extern struct obstack *saveable_obstack;
374
375 /* Here is how primitive or already-canonicalized types' hash
376 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
377 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
378
379 /* Construct, lay out and return the type of methods belonging to class
380    BASETYPE, whose arguments are described by ARGTYPES and whose values
381    are described by RETTYPE.  If an identical type exists already, reuse it.  */
382 tree
383 build_cplus_method_type (basetype, rettype, argtypes)
384 tree basetype, rettype, argtypes;
385 {
386 register tree t;
387 tree ptype;
388 int hashcode;
389
390 /* Make a node of the sort we want. */
391 t = make_node (METHOD_TYPE);
392
393 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
394 TREE_TYPE (t) = rettype;
395 if (IS_SIGNATURE (basetype))
396 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
397 TYPE_READONLY (basetype),
398 TYPE_VOLATILE (basetype));
399 else
400 ptype = build_pointer_type (basetype);
401
402 /* The actual arglist for this function includes a "hidden" argument
403 which is "this". Put it into the list of argument types. */
404
405 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
406 TYPE_ARG_TYPES (t) = argtypes;
407 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
408
409 /* If we already have such a type, use the old one and free this one.
410 Note that it also frees up the above cons cell if found. */
411 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
412 t = type_hash_canon (hashcode, t);
413
414 if (TYPE_SIZE (t) == 0)
415 layout_type (t);
416
417 return t;
418 }
419
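/* Like build_cplus_method_type, but for a static member function of
   BASETYPE: build a FUNCTION_TYPE taking ARGTYPES and returning RETTYPE,
   with no `this' argument added, and canonicalize it via the type hash.  */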
420 tree
421 build_cplus_staticfn_type (basetype, rettype, argtypes)
422 tree basetype, rettype, argtypes;
423 {
424 register tree t;
425 int hashcode;
426
427 /* Make a node of the sort we want. */
428 t = make_node (FUNCTION_TYPE);
429
430 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
431 TREE_TYPE (t) = rettype;
432
433 TYPE_ARG_TYPES (t) = argtypes;
434
435 /* If we already have such a type, use the old one and free this one.
436 Note that it also frees up the above cons cell if found. */
437 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
438 t = type_hash_canon (hashcode, t);
439
440 if (TYPE_SIZE (t) == 0)
441 layout_type (t);
442
443 return t;
444 }
445
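/* Like build_array_type, but allocate the result on the permanent obstack
   when both ELT_TYPE and INDEX_TYPE are permanent, and propagate the
   element type's TYPE_NEEDS_CONSTRUCTING and TYPE_NEEDS_DESTRUCTOR flags.  */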
446 tree
447 build_cplus_array_type (elt_type, index_type)
448 tree elt_type;
449 tree index_type;
450 {
451 register struct obstack *ambient_obstack = current_obstack;
452 register struct obstack *ambient_saveable_obstack = saveable_obstack;
453 tree t;
454
455 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
456 make this permanent too. */
457 if (TREE_PERMANENT (elt_type)
458 && (index_type == 0 || TREE_PERMANENT (index_type)))
459 {
460 current_obstack = &permanent_obstack;
461 saveable_obstack = &permanent_obstack;
462 }
463
464 t = build_array_type (elt_type, index_type);
465
466 /* Push these needs up so that initialization takes place
467 more easily. */
468 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
469 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
470 current_obstack = ambient_obstack;
471 saveable_obstack = ambient_saveable_obstack;
472 return t;
473 }
474 \f
475 /* Make a variant type in the proper way for C/C++, propagating qualifiers
476 down to the element type of an array. */
477
478 tree
479 cp_build_type_variant (type, constp, volatilep)
480 tree type;
481 int constp, volatilep;
482 {
483 if (TREE_CODE (type) == ARRAY_TYPE)
484 {
485 tree real_main_variant = TYPE_MAIN_VARIANT (type);
486
487 push_obstacks (TYPE_OBSTACK (real_main_variant),
488 TYPE_OBSTACK (real_main_variant));
489 type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
490 constp, volatilep),
491 TYPE_DOMAIN (type));
492
493       /* TYPE must be on the same obstack as REAL_MAIN_VARIANT.  If not,
494 make a copy. (TYPE might have come from the hash table and
495 REAL_MAIN_VARIANT might be in some function's obstack.) */
496
497 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
498 {
499 type = copy_node (type);
500 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
501 }
502
503 TYPE_MAIN_VARIANT (type) = real_main_variant;
504 pop_obstacks ();
505 }
506 return build_type_variant (type, constp, volatilep);
507 }
508 \f
509 /* Add OFFSET to all base types of BINFO.
510
511    OFFSET, which is a type offset, is a number of bytes.
512
513 Note that we don't have to worry about having two paths to the
514 same base type, since this type owns its association list. */
515 void
516 propagate_binfo_offsets (binfo, offset)
517 tree binfo;
518 tree offset;
519 {
520 tree binfos = BINFO_BASETYPES (binfo);
521 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
522
523 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
524 {
525 tree base_binfo = TREE_VEC_ELT (binfos, i);
526
527 if (TREE_VIA_VIRTUAL (base_binfo))
528 i += 1;
529 else
530 {
531 int j;
532 tree base_binfos = BINFO_BASETYPES (base_binfo);
533 tree delta;
534
535 for (j = i+1; j < n_baselinks; j++)
536 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
537 {
538 /* The next basetype offset must take into account the space
539 between the classes, not just the size of each class. */
540 delta = size_binop (MINUS_EXPR,
541 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
542 BINFO_OFFSET (base_binfo));
543 break;
544 }
545
546 #if 0
547 if (BINFO_OFFSET_ZEROP (base_binfo))
548 BINFO_OFFSET (base_binfo) = offset;
549 else
550 BINFO_OFFSET (base_binfo)
551 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
552 #else
553 BINFO_OFFSET (base_binfo) = offset;
554 #endif
555 if (base_binfos)
556 {
557 int k;
558 tree chain = NULL_TREE;
559
560 /* Now unshare the structure beneath BASE_BINFO. */
561 for (k = TREE_VEC_LENGTH (base_binfos)-1;
562 k >= 0; k--)
563 {
564 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
565 if (! TREE_VIA_VIRTUAL (base_base_binfo))
566 TREE_VEC_ELT (base_binfos, k)
567 = make_binfo (BINFO_OFFSET (base_base_binfo),
568 base_base_binfo,
569 BINFO_VTABLE (base_base_binfo),
570 BINFO_VIRTUALS (base_base_binfo),
571 chain);
572 chain = TREE_VEC_ELT (base_binfos, k);
573 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
574 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
575 BINFO_INHERITANCE_CHAIN (chain) = base_binfo;
576 }
577 /* Now propagate the offset to the base types. */
578 propagate_binfo_offsets (base_binfo, offset);
579 }
580
581 /* Go to our next class that counts for offset propagation. */
582 i = j;
583 if (i < n_baselinks)
584 offset = size_binop (PLUS_EXPR, offset, delta);
585 }
586 }
587 }
588
589 /* Compute the actual offsets that our virtual base classes
590 will have *for this type*. This must be performed after
591    the fields are laid out, since virtual baseclasses must
592    be laid out at the end of the record.
593
594 Returns the maximum number of virtual functions any of the virtual
595 baseclasses provide. */
596 int
597 layout_vbasetypes (rec, max)
598 tree rec;
599 int max;
600 {
601 /* Get all the virtual base types that this type uses.
602 The TREE_VALUE slot holds the virtual baseclass type. */
603 tree vbase_types = get_vbase_types (rec);
604
605 #ifdef STRUCTURE_SIZE_BOUNDARY
606 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
607 #else
608 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
609 #endif
610 int desired_align;
611
612 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
613 where CONST_SIZE is an integer
614 and VAR_SIZE is a tree expression.
615 If VAR_SIZE is null, the size is just CONST_SIZE.
616 Naturally we try to avoid using VAR_SIZE. */
617 register unsigned const_size = 0;
618 register tree var_size = 0;
619 int nonvirtual_const_size;
620 tree nonvirtual_var_size;
621
622 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
623
624 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
625 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
626 else
627 var_size = TYPE_SIZE (rec);
628
629 nonvirtual_const_size = const_size;
630 nonvirtual_var_size = var_size;
631
632 while (vbase_types)
633 {
634 tree basetype = BINFO_TYPE (vbase_types);
635 tree offset;
636
637 desired_align = TYPE_ALIGN (basetype);
638 record_align = MAX (record_align, desired_align);
639
640 if (const_size == 0)
641 offset = integer_zero_node;
642 else
643 {
644 /* Give each virtual base type the alignment it wants. */
645 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
646 * TYPE_ALIGN (basetype);
647 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
648 }
649
650 if (CLASSTYPE_VSIZE (basetype) > max)
651 max = CLASSTYPE_VSIZE (basetype);
652 BINFO_OFFSET (vbase_types) = offset;
653
654 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
655 {
656 	  /* Every virtual baseclass takes at least a UNIT, so that we can
657 	     take its address and get something different for each base.  */
658 const_size += MAX (BITS_PER_UNIT,
659 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
660 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
661 }
662 else if (var_size == 0)
663 var_size = TYPE_SIZE (basetype);
664 else
665 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
666
667 vbase_types = TREE_CHAIN (vbase_types);
668 }
669
670 if (const_size)
671 {
672 /* Because a virtual base might take a single byte above,
673 	 we have to re-adjust the total size to make sure it is
674 	 a multiple of the alignment.  */
675 /* Give the whole object the alignment it wants. */
676 const_size = CEIL (const_size, record_align) * record_align;
677 }
678
679 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
680 here, as that is for this class, without any virtual base classes. */
681 TYPE_ALIGN (rec) = record_align;
682 if (const_size != nonvirtual_const_size)
683 {
684 CLASSTYPE_VBASE_SIZE (rec)
685 = size_int (const_size - nonvirtual_const_size);
686 TYPE_SIZE (rec) = size_int (const_size);
687 }
688
689 /* Now propagate offset information throughout the lattice
690 under the vbase type. */
691 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
692 vbase_types = TREE_CHAIN (vbase_types))
693 {
694 tree base_binfos = BINFO_BASETYPES (vbase_types);
695
696 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
697
698 if (base_binfos)
699 {
700 tree chain = NULL_TREE;
701 int j;
702 /* Now unshare the structure beneath BASE_BINFO. */
703
704 for (j = TREE_VEC_LENGTH (base_binfos)-1;
705 j >= 0; j--)
706 {
707 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
708 if (! TREE_VIA_VIRTUAL (base_base_binfo))
709 TREE_VEC_ELT (base_binfos, j)
710 = make_binfo (BINFO_OFFSET (base_base_binfo),
711 base_base_binfo,
712 BINFO_VTABLE (base_base_binfo),
713 BINFO_VIRTUALS (base_base_binfo),
714 chain);
715 chain = TREE_VEC_ELT (base_binfos, j);
716 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
717 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
718 BINFO_INHERITANCE_CHAIN (chain) = vbase_types;
719 }
720
721 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
722 }
723 }
724
725 return max;
726 }
727
728 /* Lay out the base types of a record type, REC.
729 Tentatively set the size and alignment of REC
730 according to the base types alone.
731
732 Offsets for immediate nonvirtual baseclasses are also computed here.
733
734 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
735 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
736
737    Returns a list of virtual base classes in a FIELD_DECL chain.  */
738 tree
739 layout_basetypes (rec, binfos)
740 tree rec, binfos;
741 {
742 /* Chain to hold all the new FIELD_DECLs which point at virtual
743 base classes. */
744 tree vbase_decls = NULL_TREE;
745
746 #ifdef STRUCTURE_SIZE_BOUNDARY
747 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
748 #else
749 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
750 #endif
751
752 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
753 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
754 the size is just CONST_SIZE. Naturally we try to avoid using
755    VAR_SIZE.  And so far, we've been successful.  */
756 #if 0
757 register tree var_size = 0;
758 #endif
759
760 register unsigned const_size = 0;
761 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
762
763 /* Handle basetypes almost like fields, but record their
764 offsets differently. */
765
766 for (i = 0; i < n_baseclasses; i++)
767 {
768 int inc, desired_align, int_vbase_size;
769 register tree base_binfo = TREE_VEC_ELT (binfos, i);
770 register tree basetype = BINFO_TYPE (base_binfo);
771 tree decl, offset;
772
773 if (TYPE_SIZE (basetype) == 0)
774 {
775 #if 0
776 /* This error is now reported in xref_tag, thus giving better
777 location information. */
778 error_with_aggr_type (base_binfo,
779 "base class `%s' has incomplete type");
780
781 TREE_VIA_PUBLIC (base_binfo) = 1;
782 TREE_VIA_PROTECTED (base_binfo) = 0;
783 TREE_VIA_VIRTUAL (base_binfo) = 0;
784
785 /* Should handle this better so that
786
787 class A;
788 class B: private A { virtual void F(); };
789
790 does not dump core when compiled. */
791 my_friendly_abort (121);
792 #endif
793 continue;
794 }
795
796 /* All basetypes are recorded in the association list of the
797 derived type. */
798
799 if (TREE_VIA_VIRTUAL (base_binfo))
800 {
801 int j;
802 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
803 + sizeof (VBASE_NAME) + 1);
804
805 /* The offset for a virtual base class is only used in computing
806 virtual function tables and for initializing virtual base
807 pointers. It is built once `get_vbase_types' is called. */
808
809 /* If this basetype can come from another vbase pointer
810 without an additional indirection, we will share
811 that pointer. If an indirection is involved, we
812 make our own pointer. */
813 for (j = 0; j < n_baseclasses; j++)
814 {
815 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
816 if (! TREE_VIA_VIRTUAL (other_base_binfo)
817 && binfo_member (basetype,
818 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
819 goto got_it;
820 }
821 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
822 decl = build_lang_decl (FIELD_DECL, get_identifier (name),
823 build_pointer_type (basetype));
824 /* If you change any of the below, take a look at all the
825 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
826 them too. */
827 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
828 DECL_VIRTUAL_P (decl) = 1;
829 DECL_FIELD_CONTEXT (decl) = rec;
830 DECL_CLASS_CONTEXT (decl) = rec;
831 DECL_FCONTEXT (decl) = basetype;
832 DECL_SAVED_INSNS (decl) = NULL_RTX;
833 DECL_FIELD_SIZE (decl) = 0;
834 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
835 TREE_CHAIN (decl) = vbase_decls;
836 BINFO_VPTR_FIELD (base_binfo) = decl;
837 vbase_decls = decl;
838
839 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
840 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
841 {
842 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
843 "destructor `%s' non-virtual");
844 warning ("in inheritance relationship `%s: virtual %s'",
845 TYPE_NAME_STRING (rec),
846 TYPE_NAME_STRING (basetype));
847 }
848 got_it:
849 /* The space this decl occupies has already been accounted for. */
850 continue;
851 }
852
853 if (const_size == 0)
854 offset = integer_zero_node;
855 else
856 {
857 /* Give each base type the alignment it wants. */
858 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
859 * TYPE_ALIGN (basetype);
860 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
861
862 #if 0
863 /* bpk: Disabled this check until someone is willing to
864 claim it as theirs and explain exactly what circumstances
865 warrant the warning. */
866 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
867 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
868 {
869 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
870 "destructor `%s' non-virtual");
871 warning ("in inheritance relationship `%s:%s %s'",
872 TYPE_NAME_STRING (rec),
873 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
874 TYPE_NAME_STRING (basetype));
875 }
876 #endif
877 }
878 BINFO_OFFSET (base_binfo) = offset;
879 if (CLASSTYPE_VSIZE (basetype))
880 {
881 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
882 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
883 }
884 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
885 TYPE_BINFO (rec) = base_binfo;
886
887 /* Add only the amount of storage not present in
888 the virtual baseclasses. */
889
890 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
891 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
892 {
893 inc = MAX (record_align,
894 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
895 - int_vbase_size));
896
897 /* Record must have at least as much alignment as any field. */
898 desired_align = TYPE_ALIGN (basetype);
899 record_align = MAX (record_align, desired_align);
900
901 const_size += inc;
902 }
903 }
904
905 if (const_size)
906 CLASSTYPE_SIZE (rec) = size_int (const_size);
907 else
908 CLASSTYPE_SIZE (rec) = integer_zero_node;
909 CLASSTYPE_ALIGN (rec) = record_align;
910
911 return vbase_decls;
912 }
913 \f
914 /* Hashing of lists so that we don't make duplicates.
915 The entry point is `list_hash_canon'. */
916
917 /* Each hash table slot is a bucket containing a chain
918 of these structures. */
919
920 struct list_hash
921 {
922 struct list_hash *next; /* Next structure in the bucket. */
923 int hashcode; /* Hash code of this list. */
924 tree list; /* The list recorded here. */
925 };
926
927 /* Now here is the hash table. When recording a list, it is added
928 to the slot whose index is the hash code mod the table size.
929 Note that the hash table is used for several kinds of lists.
930 While all these live in the same table, they are completely independent,
931 and the hash code is computed differently for each of these. */
932
933 #define TYPE_HASH_SIZE 59
934 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
935
936 /* Compute a hash code for a list (chain of TREE_LIST nodes
937 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
938 TREE_COMMON slots), by adding the hash codes of the individual entries. */
939
940 int
941 list_hash (list)
942 tree list;
943 {
944 register int hashcode = 0;
945
946 if (TREE_CHAIN (list))
947 hashcode += TYPE_HASH (TREE_CHAIN (list));
948
949 if (TREE_VALUE (list))
950 hashcode += TYPE_HASH (TREE_VALUE (list));
951 else
952 hashcode += 1007;
953 if (TREE_PURPOSE (list))
954 hashcode += TYPE_HASH (TREE_PURPOSE (list));
955 else
956 hashcode += 1009;
957 return hashcode;
958 }
959
960 /* Look in the list hash table for a list identical to LIST, whose
961    hash code is HASHCODE.  If one is found, return it.  Otherwise return 0.  */
962
963 tree
964 list_hash_lookup (hashcode, list)
965 int hashcode;
966 tree list;
967 {
968 register struct list_hash *h;
969 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
970 if (h->hashcode == hashcode
971 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
972 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
973 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
974 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
975 && TREE_VALUE (h->list) == TREE_VALUE (list)
976 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
977 {
978 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
979 return h->list;
980 }
981 return 0;
982 }
983
984 /* Add an entry to the list-hash-table
985    for a list LIST whose hash code is HASHCODE.  */
986
987 void
988 list_hash_add (hashcode, list)
989 int hashcode;
990 tree list;
991 {
992 register struct list_hash *h;
993
994 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
995 h->hashcode = hashcode;
996 h->list = list;
997 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
998 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
999 }
1000
1001 /* Given LIST, and HASHCODE its hash code, return the canonical
1002    object for an identical list if one already exists.
1003    Otherwise, return LIST, and record it as the canonical object
1004 if it is a permanent object.
1005
1006 To use this function, first create a list of the sort you want.
1007 Then compute its hash code from the fields of the list that
1008 make it different from other similar lists.
1009 Then call this function and use the value.
1010 This function frees the list you pass in if it is a duplicate. */
1011
1012 /* Set to 1 to debug without canonicalization. Never set by program. */
1013 static int debug_no_list_hash = 0;
1014
1015 tree
1016 list_hash_canon (hashcode, list)
1017 int hashcode;
1018 tree list;
1019 {
1020 tree t1;
1021
1022 if (debug_no_list_hash)
1023 return list;
1024
1025 t1 = list_hash_lookup (hashcode, list);
1026 if (t1 != 0)
1027 {
1028 obstack_free (&class_obstack, list);
1029 return t1;
1030 }
1031
1032 /* If this is a new list, record it for later reuse. */
1033 list_hash_add (hashcode, list);
1034
1035 return list;
1036 }
1037
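/* Build a TREE_LIST node on the class obstack from PURPOSE, VALUE and CHAIN,
   recording the VIA_PUBLIC, VIA_VIRTUAL and VIA_PROTECTED bits on it, and
   return the canonical copy from the list hash table.  */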
1038 tree
1039 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
1040 int via_public, via_virtual, via_protected;
1041 tree purpose, value, chain;
1042 {
1043 struct obstack *ambient_obstack = current_obstack;
1044 tree t;
1045 int hashcode;
1046
1047 current_obstack = &class_obstack;
1048 t = tree_cons (purpose, value, chain);
1049 TREE_VIA_PUBLIC (t) = via_public;
1050 TREE_VIA_PROTECTED (t) = via_protected;
1051 TREE_VIA_VIRTUAL (t) = via_virtual;
1052 hashcode = list_hash (t);
1053 t = list_hash_canon (hashcode, t);
1054 current_obstack = ambient_obstack;
1055 return t;
1056 }
1057
1058 /* Constructor for hashed lists. */
1059 tree
1060 hash_tree_chain (value, chain)
1061 tree value, chain;
1062 {
1063 struct obstack *ambient_obstack = current_obstack;
1064 tree t;
1065 int hashcode;
1066
1067 current_obstack = &class_obstack;
1068 t = tree_cons (NULL_TREE, value, chain);
1069 hashcode = list_hash (t);
1070 t = list_hash_canon (hashcode, t);
1071 current_obstack = ambient_obstack;
1072 return t;
1073 }
1074
1075 /* Similar, but used for concatenating two lists. */
1076 tree
1077 hash_chainon (list1, list2)
1078 tree list1, list2;
1079 {
1080 if (list2 == 0)
1081 return list1;
1082 if (list1 == 0)
1083 return list2;
1084 if (TREE_CHAIN (list1) == NULL_TREE)
1085 return hash_tree_chain (TREE_VALUE (list1), list2);
1086 return hash_tree_chain (TREE_VALUE (list1),
1087 hash_chainon (TREE_CHAIN (list1), list2));
1088 }
1089
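/* Subroutine of get_decl_list and list_hash_lookup_or_cons: return a
   one-element TREE_LIST naming the identifier VALUE, reusing a cached
   list where possible, or NULL_TREE if no suitable list is available.  */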
1090 static tree
1091 get_identifier_list (value)
1092 tree value;
1093 {
1094 tree list = IDENTIFIER_AS_LIST (value);
1095 if (list != NULL_TREE
1096 && (TREE_CODE (list) != TREE_LIST
1097 || TREE_VALUE (list) != value))
1098 list = NULL_TREE;
1099 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
1100 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
1101 && IDENTIFIER_TYPE_VALUE (value)
1102 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
1103 {
1104 tree type = IDENTIFIER_TYPE_VALUE (value);
1105
1106 if (TYPE_PTRMEMFUNC_P (type))
1107 list = NULL_TREE;
1108 else if (type == current_class_type)
1109 /* Don't mess up the constructor name. */
1110 list = tree_cons (NULL_TREE, value, NULL_TREE);
1111 else
1112 {
1113 register tree id;
1114 /* This will return the correct thing for regular types,
1115 nested types, and templates. Yay! */
1116 if (TYPE_NESTED_NAME (type))
1117 id = TYPE_NESTED_NAME (type);
1118 else
1119 id = TYPE_IDENTIFIER (type);
1120
1121 if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
1122 CLASSTYPE_ID_AS_LIST (type)
1123 = perm_tree_cons (NULL_TREE, id, NULL_TREE);
1124 list = CLASSTYPE_ID_AS_LIST (type);
1125 }
1126 }
1127 return list;
1128 }
1129
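/* Return a one-element list whose TREE_VALUE is VALUE, reusing a cached
   list when VALUE is an IDENTIFIER_NODE or an aggregate RECORD_TYPE.  */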
1130 tree
1131 get_decl_list (value)
1132 tree value;
1133 {
1134 tree list = NULL_TREE;
1135
1136 if (TREE_CODE (value) == IDENTIFIER_NODE)
1137 list = get_identifier_list (value);
1138 else if (TREE_CODE (value) == RECORD_TYPE
1139 && TYPE_LANG_SPECIFIC (value))
1140 list = CLASSTYPE_AS_LIST (value);
1141
1142 if (list != NULL_TREE)
1143 {
1144 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1145 return list;
1146 }
1147
1148 return build_decl_list (NULL_TREE, value);
1149 }
1150
1151 /* Look in the list hash table for a list isomorphic to
1152 `build_tree_list (NULL_TREE, VALUE)'.
1153 If one is found, return it. Otherwise return 0. */
1154
1155 tree
1156 list_hash_lookup_or_cons (value)
1157 tree value;
1158 {
1159 register int hashcode = TYPE_HASH (value);
1160 register struct list_hash *h;
1161 struct obstack *ambient_obstack;
1162 tree list = NULL_TREE;
1163
1164 if (TREE_CODE (value) == IDENTIFIER_NODE)
1165 list = get_identifier_list (value);
1166 else if (TREE_CODE (value) == TYPE_DECL
1167 && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
1168 && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
1169 list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
1170 else if (TREE_CODE (value) == RECORD_TYPE
1171 && TYPE_LANG_SPECIFIC (value))
1172 list = CLASSTYPE_AS_LIST (value);
1173
1174 if (list != NULL_TREE)
1175 {
1176 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
1177 return list;
1178 }
1179
1180 if (debug_no_list_hash)
1181 return hash_tree_chain (value, NULL_TREE);
1182
1183 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1184 if (h->hashcode == hashcode
1185 && TREE_VIA_VIRTUAL (h->list) == 0
1186 && TREE_VIA_PUBLIC (h->list) == 0
1187 && TREE_VIA_PROTECTED (h->list) == 0
1188 && TREE_PURPOSE (h->list) == 0
1189 && TREE_VALUE (h->list) == value)
1190 {
1191 my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
1192 my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
1193 return h->list;
1194 }
1195
1196 ambient_obstack = current_obstack;
1197 current_obstack = &class_obstack;
1198 list = build_tree_list (NULL_TREE, value);
1199 list_hash_add (hashcode, list);
1200 current_obstack = ambient_obstack;
1201 return list;
1202 }
1203 \f
1204 /* Build an association between TYPE and some parameters:
1205
1206 OFFSET is the offset added to `this' to convert it to a pointer
1207    of type `TYPE *'.
1208
1209 BINFO is the base binfo to use, if we are deriving from one. This
1210 is necessary, as we want specialized parent binfos from base
1211 classes, so that the VTABLE_NAMEs of bases are for the most derived
1212    type, instead of the simple type.
1213
1214 VTABLE is the virtual function table with which to initialize
1215 sub-objects of type TYPE.
1216
1217 VIRTUALS are the virtual functions sitting in VTABLE.
1218
1219 CHAIN are more associations we must retain. */
1220
1221 tree
1222 make_binfo (offset, binfo, vtable, virtuals, chain)
1223 tree offset, binfo;
1224 tree vtable, virtuals;
1225 tree chain;
1226 {
1227 tree new_binfo = make_tree_vec (6);
1228 tree type;
1229
1230 if (TREE_CODE (binfo) == TREE_VEC)
1231 type = BINFO_TYPE (binfo);
1232 else
1233 {
1234 type = binfo;
1235 binfo = TYPE_BINFO (binfo);
1236 }
1237
1238 TREE_CHAIN (new_binfo) = chain;
1239 if (chain)
1240 TREE_USED (new_binfo) = TREE_USED (chain);
1241
1242 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1243 BINFO_OFFSET (new_binfo) = offset;
1244 BINFO_VTABLE (new_binfo) = vtable;
1245 BINFO_VIRTUALS (new_binfo) = virtuals;
1246 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1247
1248 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1249 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1250 return new_binfo;
1251 }
1252
1253 /* Return the binfo value for ELEM in TYPE. */
1254
1255 tree
1256 binfo_value (elem, type)
1257 tree elem;
1258 tree type;
1259 {
1260 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1261 compiler_error ("base class `%s' ambiguous in binfo_value",
1262 TYPE_NAME_STRING (elem));
1263 if (elem == type)
1264 return TYPE_BINFO (type);
1265 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1266 return type;
1267 return get_binfo (elem, type, 0);
1268 }
1269
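/* Reverse the BINFO_INHERITANCE_CHAIN links of PATH in place and return
   the new head (the former end of the path).  */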
1270 tree
1271 reverse_path (path)
1272 tree path;
1273 {
1274 register tree prev = 0, tmp, next;
1275 for (tmp = path; tmp; tmp = next)
1276 {
1277 next = BINFO_INHERITANCE_CHAIN (tmp);
1278 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1279 prev = tmp;
1280 }
1281 return prev;
1282 }
1283
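/* Return the binfo in LIST whose type is ELEM, or failing that, the binfo
   for ELEM found among the basetypes of LIST's entries; abort if two such
   binfos disagree about their offsets.  */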
1284 tree
1285 virtual_member (elem, list)
1286 tree elem;
1287 tree list;
1288 {
1289 tree t;
1290 tree rval, nval;
1291
1292 for (t = list; t; t = TREE_CHAIN (t))
1293 if (elem == BINFO_TYPE (t))
1294 return t;
1295 rval = 0;
1296 for (t = list; t; t = TREE_CHAIN (t))
1297 {
1298 tree binfos = BINFO_BASETYPES (t);
1299 int i;
1300
1301 if (binfos != NULL_TREE)
1302 for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
1303 {
1304 nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
1305 if (nval)
1306 {
1307 if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
1308 my_friendly_abort (104);
1309 rval = nval;
1310 }
1311 }
1312 }
1313 return rval;
1314 }
1315
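/* Print the type, offset, vtable decl and virtual function entries of
   binfo ELEM to stderr.  */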
1316 void
1317 debug_binfo (elem)
1318 tree elem;
1319 {
1320 int i;
1321 tree virtuals;
1322
1323 fprintf (stderr, "type \"%s\"; offset = %d\n",
1324 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1325 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1326 fprintf (stderr, "vtable type:\n");
1327 debug_tree (BINFO_TYPE (elem));
1328 if (BINFO_VTABLE (elem))
1329 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1330 else
1331 fprintf (stderr, "no vtable decl yet\n");
1332 fprintf (stderr, "virtuals:\n");
1333 virtuals = BINFO_VIRTUALS (elem);
1334 if (virtuals != 0)
1335 {
1336 /* skip the rtti type descriptor entry */
1337 virtuals = TREE_CHAIN (virtuals);
1338 }
1339 i = 1;
1340 while (virtuals)
1341 {
1342 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1343 fprintf (stderr, "%s [%d =? %d]\n",
1344 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1345 i, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1346 virtuals = TREE_CHAIN (virtuals);
1347 i += 1;
1348 }
1349 }
1350
1351 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1352 We expect a null pointer to mark the end of the chain.
1353 This is the Lisp primitive `length'. */
1354
1355 int
1356 decl_list_length (t)
1357 tree t;
1358 {
1359 register tree tail;
1360 register int len = 0;
1361
1362 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1363 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1364 for (tail = t; tail; tail = DECL_CHAIN (tail))
1365 len++;
1366
1367 return len;
1368 }
1369
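/* Return the number of functions T stands for: 1 for a lone FUNCTION_DECL,
   or the length of the DECL_CHAIN for a TREE_LIST of overloaded functions.  */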
1370 int
1371 count_functions (t)
1372 tree t;
1373 {
1374 if (TREE_CODE (t) == FUNCTION_DECL)
1375 return 1;
1376 else if (TREE_CODE (t) == TREE_LIST)
1377 return decl_list_length (TREE_VALUE (t));
1378
1379 my_friendly_abort (359);
1380 return 0;
1381 }
1382
1383 /* Like value_member, but for DECL_CHAINs. */
1384 tree
1385 decl_value_member (elem, list)
1386 tree elem, list;
1387 {
1388 while (list)
1389 {
1390 if (elem == list)
1391 return list;
1392 list = DECL_CHAIN (list);
1393 }
1394 return NULL_TREE;
1395 }
1396
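/* Nonzero if X denotes a function or overload set: a FUNCTION_DECL, or a
   TREE_LIST whose value is a FUNCTION_DECL or TEMPLATE_DECL.  */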
1397 int
1398 is_overloaded_fn (x)
1399 tree x;
1400 {
1401 if (TREE_CODE (x) == FUNCTION_DECL)
1402 return 1;
1403
1404 if (TREE_CODE (x) == TREE_LIST
1405 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1406 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1407 return 1;
1408
1409 return 0;
1410 }
1411
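/* Nonzero if X is an overload list proper (a TREE_LIST of functions or
   templates), as opposed to a single FUNCTION_DECL.  */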
1412 int
1413 really_overloaded_fn (x)
1414 tree x;
1415 {
1416 if (TREE_CODE (x) == TREE_LIST
1417 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1418 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1419 return 1;
1420
1421 return 0;
1422 }
1423
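/* Return the first function named by FROM: FROM itself if it is a
   FUNCTION_DECL, otherwise the TREE_VALUE of the overload list.  */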
1424 tree
1425 get_first_fn (from)
1426 tree from;
1427 {
1428 if (TREE_CODE (from) == FUNCTION_DECL)
1429 return from;
1430
1431 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1432
1433 return TREE_VALUE (from);
1434 }
1435
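/* Return the function address stored in vtable entry ENTRY.  With
   -fvtable-thunks, look through a THUNK_DECL to the function it calls;
   otherwise extract the address field of the entry's CONSTRUCTOR.  */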
1436 tree
1437 fnaddr_from_vtable_entry (entry)
1438 tree entry;
1439 {
1440 if (flag_vtable_thunks)
1441 {
1442 tree func = entry;
1443 if (TREE_CODE (func) == ADDR_EXPR)
1444 func = TREE_OPERAND (func, 0);
1445 if (TREE_CODE (func) == THUNK_DECL)
1446 return DECL_INITIAL (func);
1447 else
1448 return entry;
1449 }
1450 else
1451 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
1452 }
1453
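/* Store VALUE as the function address in vtable entry ENTRY.  Not
   supported when -fvtable-thunks is in effect.  */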
1454 void
1455 set_fnaddr_from_vtable_entry (entry, value)
1456 tree entry, value;
1457 {
1458 if (flag_vtable_thunks)
1459 abort ();
1460 else
1461 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value;
1462 }
1463
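/* Return the argument type list of T's type, minus its first entry
   (for a method, the implicit `this' argument).  */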
1464 tree
1465 function_arg_chain (t)
1466 tree t;
1467 {
1468 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1469 }
1470
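/* Nonzero if T, after stripping one level of CODE when its tree code
   matches, is an aggregate type.  */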
1471 int
1472 promotes_to_aggr_type (t, code)
1473 tree t;
1474 enum tree_code code;
1475 {
1476 if (TREE_CODE (t) == code)
1477 t = TREE_TYPE (t);
1478 return IS_AGGR_TYPE (t);
1479 }
1480
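/* Nonzero if T1 and T2 have the same tree code and both are aggregate
   types.  */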
1481 int
1482 is_aggr_type_2 (t1, t2)
1483 tree t1, t2;
1484 {
1485 if (TREE_CODE (t1) != TREE_CODE (t2))
1486 return 0;
1487 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1488 }
1489
1490 /* Give message using types TYPE1 and TYPE2 as arguments.
1491 PFN is the function which will print the message;
1492 S is the format string for PFN to use. */
1493 void
1494 message_2_types (pfn, s, type1, type2)
1495 void (*pfn) ();
1496 char *s;
1497 tree type1, type2;
1498 {
1499 tree name1 = TYPE_NAME (type1);
1500 tree name2 = TYPE_NAME (type2);
1501 if (TREE_CODE (name1) == TYPE_DECL)
1502 name1 = DECL_NAME (name1);
1503 if (TREE_CODE (name2) == TYPE_DECL)
1504 name2 = DECL_NAME (name2);
1505 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1506 }
1507 \f
1508 #define PRINT_RING_SIZE 4
1509
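/* Return a human-readable name for DECL.  Results for FUNCTION_DECLs are
   kept in a small ring of malloc'd strings so that the pointer returned
   stays valid across a few successive calls.  */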
1510 char *
1511 lang_printable_name (decl)
1512 tree decl;
1513 {
1514 static tree decl_ring[PRINT_RING_SIZE];
1515 static char *print_ring[PRINT_RING_SIZE];
1516 static int ring_counter;
1517 int i;
1518
1519 /* Only cache functions. */
1520 if (TREE_CODE (decl) != FUNCTION_DECL
1521 || DECL_LANG_SPECIFIC (decl) == 0)
1522 return decl_as_string (decl, 1);
1523
1524 /* See if this print name is lying around. */
1525 for (i = 0; i < PRINT_RING_SIZE; i++)
1526 if (decl_ring[i] == decl)
1527 /* yes, so return it. */
1528 return print_ring[i];
1529
1530 if (++ring_counter == PRINT_RING_SIZE)
1531 ring_counter = 0;
1532
1533 if (current_function_decl != NULL_TREE)
1534 {
1535 if (decl_ring[ring_counter] == current_function_decl)
1536 ring_counter += 1;
1537 if (ring_counter == PRINT_RING_SIZE)
1538 ring_counter = 0;
1539 if (decl_ring[ring_counter] == current_function_decl)
1540 my_friendly_abort (106);
1541 }
1542
1543 if (print_ring[ring_counter])
1544 free (print_ring[ring_counter]);
1545
1546 {
1547 int print_ret_type_p
1548 = (!DECL_CONSTRUCTOR_P (decl)
1549 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1550
1551 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1552 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1553 strcpy (print_ring[ring_counter], name);
1554 decl_ring[ring_counter] = decl;
1555 }
1556 return print_ring[ring_counter];
1557 }
1558 \f
1559 /* Comparison function for sorting identifiers in RAISES lists.
1560 Note that because IDENTIFIER_NODEs are unique, we can sort
1561 them by address, saving an indirection. */
1562 static int
1563 id_cmp (p1, p2)
1564 tree *p1, *p2;
1565 {
1566 return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2);
1567 }
1568
1569 /* Build the FUNCTION_TYPE or METHOD_TYPE which may raise exceptions
1570 listed in RAISES. */
1571 tree
1572 build_exception_variant (ctype, type, raises)
1573 tree ctype, type;
1574 tree raises;
1575 {
1576 int i;
1577 tree v = TYPE_MAIN_VARIANT (type);
1578 tree t, t2, cname;
1579 tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
1580 int constp = TYPE_READONLY (type);
1581 int volatilep = TYPE_VOLATILE (type);
1582
1583 for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
1584 {
1585 if (TYPE_READONLY (v) != constp
1586 || TYPE_VOLATILE (v) != volatilep)
1587 continue;
1588
1589 /* @@ This should do set equality, not exact match. */
1590 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
1591 /* List of exceptions raised matches previously found list.
1592
1593 @@ Nice to free up storage used in consing up the
1594 @@ list of exceptions raised. */
1595 return v;
1596 }
1597
1598 /* Need to build a new variant. */
1599 v = copy_node (type);
1600 TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
1601 TYPE_NEXT_VARIANT (type) = v;
1602 if (raises && ! TREE_PERMANENT (raises))
1603 {
1604 push_obstacks_nochange ();
1605 end_temporary_allocation ();
1606 raises = copy_list (raises);
1607 pop_obstacks ();
1608 }
1609 TYPE_RAISES_EXCEPTIONS (v) = raises;
1610 return v;
1611 }
1612
1613 /* Subroutine of copy_to_permanent.
1614
1615    Assuming T is a node built bottom-up, make it all exist on
1616    the permanent obstack, if it is not permanent already.  */
1617
1618 tree
1619 mapcar (t, func)
1620 tree t;
1621 tree (*func)();
1622 {
1623 enum tree_code code;
1624 tree tmp;
1625
1626 if (t == NULL_TREE)
1627 return t;
1628
1629 if (tmp = func (t), tmp != NULL_TREE)
1630 return tmp;
1631
1632 switch (code = TREE_CODE (t))
1633 {
1634 case ERROR_MARK:
1635 return error_mark_node;
1636
1637 case VAR_DECL:
1638 case FUNCTION_DECL:
1639 case CONST_DECL:
1640 break;
1641
1642 case PARM_DECL:
1643 {
1644 tree chain = TREE_CHAIN (t);
1645 t = copy_node (t);
1646 TREE_CHAIN (t) = mapcar (chain, func);
1647 TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
1648 DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
1649 DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
1650 return t;
1651 }
1652
1653 case TREE_LIST:
1654 {
1655 tree chain = TREE_CHAIN (t);
1656 t = copy_node (t);
1657 TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
1658 TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
1659 TREE_CHAIN (t) = mapcar (chain, func);
1660 return t;
1661 }
1662
1663 case TREE_VEC:
1664 {
1665 int len = TREE_VEC_LENGTH (t);
1666
1667 t = copy_node (t);
1668 while (len--)
1669 TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
1670 return t;
1671 }
1672
1673 case INTEGER_CST:
1674 case REAL_CST:
1675 case STRING_CST:
1676 return copy_node (t);
1677
1678 case COND_EXPR:
1679 case TARGET_EXPR:
1680 case NEW_EXPR:
1681 t = copy_node (t);
1682 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1683 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1684 TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
1685 return t;
1686
1687 case SAVE_EXPR:
1688 t = copy_node (t);
1689 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1690 return t;
1691
1692 case MODIFY_EXPR:
1693 case PLUS_EXPR:
1694 case MINUS_EXPR:
1695 case MULT_EXPR:
1696 case TRUNC_DIV_EXPR:
1697 case TRUNC_MOD_EXPR:
1698 case MIN_EXPR:
1699 case MAX_EXPR:
1700 case LSHIFT_EXPR:
1701 case RSHIFT_EXPR:
1702 case BIT_IOR_EXPR:
1703 case BIT_XOR_EXPR:
1704 case BIT_AND_EXPR:
1705 case BIT_ANDTC_EXPR:
1706 case TRUTH_ANDIF_EXPR:
1707 case TRUTH_ORIF_EXPR:
1708 case LT_EXPR:
1709 case LE_EXPR:
1710 case GT_EXPR:
1711 case GE_EXPR:
1712 case EQ_EXPR:
1713 case NE_EXPR:
1714 case CEIL_DIV_EXPR:
1715 case FLOOR_DIV_EXPR:
1716 case ROUND_DIV_EXPR:
1717 case CEIL_MOD_EXPR:
1718 case FLOOR_MOD_EXPR:
1719 case ROUND_MOD_EXPR:
1720 case COMPOUND_EXPR:
1721 case PREDECREMENT_EXPR:
1722 case PREINCREMENT_EXPR:
1723 case POSTDECREMENT_EXPR:
1724 case POSTINCREMENT_EXPR:
1725 case CALL_EXPR:
1726 t = copy_node (t);
1727 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1728 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1729 return t;
1730
1731 case CONVERT_EXPR:
1732 case ADDR_EXPR:
1733 case INDIRECT_REF:
1734 case NEGATE_EXPR:
1735 case BIT_NOT_EXPR:
1736 case TRUTH_NOT_EXPR:
1737 case NOP_EXPR:
1738 case COMPONENT_REF:
1739 t = copy_node (t);
1740 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1741 return t;
1742
1743 case POINTER_TYPE:
1744 return build_pointer_type (mapcar (TREE_TYPE (t), func));
1745 case REFERENCE_TYPE:
1746 return build_reference_type (mapcar (TREE_TYPE (t), func));
1747 case FUNCTION_TYPE:
1748 return build_function_type (mapcar (TREE_TYPE (t), func),
1749 mapcar (TYPE_ARG_TYPES (t), func));
1750 case ARRAY_TYPE:
1751 return build_array_type (mapcar (TREE_TYPE (t), func),
1752 mapcar (TYPE_DOMAIN (t), func));
1753 case INTEGER_TYPE:
1754 return build_index_type (mapcar (TYPE_MAX_VALUE (t), func));
1755
1756 case OFFSET_TYPE:
1757 return build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
1758 mapcar (TREE_TYPE (t), func));
1759 case METHOD_TYPE:
1760 return build_method_type
1761 (mapcar (TYPE_METHOD_BASETYPE (t), func),
1762 build_function_type
1763 (mapcar (TREE_TYPE (t), func),
1764 mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func)));
1765
1766 case RECORD_TYPE:
1767 if (TYPE_PTRMEMFUNC_P (t))
1768 return build_ptrmemfunc_type
1769 (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
1770 /* else fall through */
1771
1772 /* This list is incomplete, but should suffice for now.
1773 It is very important that `sorry' does not call
1774 `report_error_function'. That could cause an infinite loop. */
1775 default:
1776 sorry ("initializer contains unrecognized tree code");
1777 return error_mark_node;
1778
1779 }
1780 my_friendly_abort (107);
1781 /* NOTREACHED */
1782 return NULL_TREE;
1783 }
1784
1785 static tree
1786 perm_manip (t)
1787 tree t;
1788 {
1789 if (TREE_PERMANENT (t))
1790 return t;
1791 return NULL_TREE;
1792 }
1793
1794 /* Assuming T is a node built bottom-up, make it all exist on
1795    the permanent obstack, if it is not permanent already.  */
1796 tree
1797 copy_to_permanent (t)
1798 tree t;
1799 {
1800 register struct obstack *ambient_obstack = current_obstack;
1801 register struct obstack *ambient_saveable_obstack = saveable_obstack;
1802 int resume;
1803
1804 if (t == NULL_TREE || TREE_PERMANENT (t))
1805 return t;
1806
1807 saveable_obstack = &permanent_obstack;
1808 current_obstack = saveable_obstack;
1809 resume = suspend_momentary ();
1810
1811 t = mapcar (t, perm_manip);
1812
1813 resume_momentary (resume);
1814 current_obstack = ambient_obstack;
1815 saveable_obstack = ambient_saveable_obstack;
1816
1817 return t;
1818 }
1819
1820 void
1821 print_lang_statistics ()
1822 {
1823 extern struct obstack maybepermanent_obstack;
1824 print_obstack_statistics ("class_obstack", &class_obstack);
1825 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1826 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1827 print_search_statistics ();
1828 print_class_statistics ();
1829 }
1830
1831 /* This is used by the `assert' macro. It is provided in libgcc.a,
1832 which `cc' doesn't know how to link. Note that the C++ front-end
1833 no longer actually uses the `assert' macro (instead, it calls
1834 my_friendly_assert). But all of the back-end files still need this. */
1835 void
1836 __eprintf (string, expression, line, filename)
1837 #ifdef __STDC__
1838 const char *string;
1839 const char *expression;
1840 unsigned line;
1841 const char *filename;
1842 #else
1843 char *string;
1844 char *expression;
1845 unsigned line;
1846 char *filename;
1847 #endif
1848 {
1849 fprintf (stderr, string, expression, line, filename);
1850 fflush (stderr);
1851 abort ();
1852 }
1853
1854 /* Return, as an INTEGER_CST node, the number of elements for
1855 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1856
1857 tree
1858 array_type_nelts_top (type)
1859 tree type;
1860 {
1861 return fold (build (PLUS_EXPR, sizetype,
1862 array_type_nelts (type),
1863 integer_one_node));
1864 }
1865
1866 /* Return, as an INTEGER_CST node, the number of elements for
1867    TYPE (which is an ARRAY_TYPE).  This recursively counts across all
1868    nested ARRAY_TYPEs that are clumped together, giving the total element count.  */
1869
1870 tree
1871 array_type_nelts_total (type)
1872 tree type;
1873 {
1874 tree sz = array_type_nelts_top (type);
1875 type = TREE_TYPE (type);
1876 while (TREE_CODE (type) == ARRAY_TYPE)
1877 {
1878 tree n = array_type_nelts_top (type);
1879 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1880 type = TREE_TYPE (type);
1881 }
1882 return sz;
1883 }
1884
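/* Subroutine of break_out_target_exprs: leave side-effect-free nodes
   alone, rebuild TARGET_EXPRs with build_cplus_new, and return NULL_TREE
   to make mapcar copy and recurse into everything else.  */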
1885 static
1886 tree
1887 bot_manip (t)
1888 tree t;
1889 {
1890 if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
1891 return t;
1892 else if (TREE_CODE (t) == TARGET_EXPR)
1893 return build_cplus_new (TREE_TYPE (t),
1894 break_out_target_exprs (TREE_OPERAND (t, 1)), 0);
1895 return NULL_TREE;
1896 }
1897
1898 /* Actually, we'll just clean out the target exprs for the moment. */
1899 tree
1900 break_out_target_exprs (t)
1901 tree t;
1902 {
1903 return mapcar (t, bot_manip);
1904 }