1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20
21 #include "config.h"
22 #include <stdio.h>
23 #include "obstack.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "flags.h"
27 #include "rtl.h"
28
29 #define CEIL(x,y) (((x) + (y) - 1) / (y))
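/* For example, CEIL (37, 8) is (37 + 8 - 1) / 8 = 5: 37 bits round up to
   five 8-bit units.  */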
30
31 /* Return nonzero if REF is an lvalue valid for this language.
32 Lvalues can be assigned, unless they have TREE_READONLY.
33 Lvalues can have their address taken, unless they have DECL_REGISTER. */
34
35 int
36 real_lvalue_p (ref)
37 tree ref;
38 {
39 if (! language_lvalue_valid (ref))
40 return 0;
41
42 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
43 return 1;
44
45 if (ref == current_class_decl && flag_this_is_variable <= 0)
46 return 0;
47
48 switch (TREE_CODE (ref))
49 {
50 /* Preincrements and predecrements are valid lvalues, provided
51 what they refer to is itself a valid lvalue. */
52 case PREINCREMENT_EXPR:
53 case PREDECREMENT_EXPR:
54 case COMPONENT_REF:
55 case SAVE_EXPR:
56 return real_lvalue_p (TREE_OPERAND (ref, 0));
57
58 case STRING_CST:
59 return 1;
60
61 case VAR_DECL:
62 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
63 && DECL_LANG_SPECIFIC (ref)
64 && DECL_IN_AGGR_P (ref))
65 return 0;
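/* Else fall through.  */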
66 case INDIRECT_REF:
67 case ARRAY_REF:
68 case PARM_DECL:
69 case RESULT_DECL:
70 case ERROR_MARK:
71 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
72 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
73 return 1;
74 break;
75
76 case WITH_CLEANUP_EXPR:
77 return real_lvalue_p (TREE_OPERAND (ref, 0));
78
79 /* A currently unresolved scope ref. */
80 case SCOPE_REF:
81 my_friendly_abort (103);
82 case OFFSET_REF:
83 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
84 return 1;
85 return real_lvalue_p (TREE_OPERAND (ref, 0))
86 && real_lvalue_p (TREE_OPERAND (ref, 1));
87 break;
88
89 case COND_EXPR:
90 return (real_lvalue_p (TREE_OPERAND (ref, 1))
91 && real_lvalue_p (TREE_OPERAND (ref, 2)));
92
93 case MODIFY_EXPR:
94 return 1;
95
96 case COMPOUND_EXPR:
97 return real_lvalue_p (TREE_OPERAND (ref, 1));
98
99 case MAX_EXPR:
100 case MIN_EXPR:
101 return (real_lvalue_p (TREE_OPERAND (ref, 0))
102 && real_lvalue_p (TREE_OPERAND (ref, 1)));
103 }
104
105 return 0;
106 }
107
108 int
109 lvalue_p (ref)
110 tree ref;
111 {
112 if (! language_lvalue_valid (ref))
113 return 0;
114
115 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
116 return 1;
117
118 if (ref == current_class_decl && flag_this_is_variable <= 0)
119 return 0;
120
121 switch (TREE_CODE (ref))
122 {
123 /* Preincrements and predecrements are valid lvalues, provided
124 what they refer to is itself a valid lvalue. */
125 case PREINCREMENT_EXPR:
126 case PREDECREMENT_EXPR:
127 case COMPONENT_REF:
128 case SAVE_EXPR:
129 return lvalue_p (TREE_OPERAND (ref, 0));
130
131 case STRING_CST:
132 return 1;
133
134 case VAR_DECL:
135 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
136 && DECL_LANG_SPECIFIC (ref)
137 && DECL_IN_AGGR_P (ref))
138 return 0;
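/* Else fall through.  */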
139 case INDIRECT_REF:
140 case ARRAY_REF:
141 case PARM_DECL:
142 case RESULT_DECL:
143 case ERROR_MARK:
144 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
145 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
146 return 1;
147 break;
148
149 case WITH_CLEANUP_EXPR:
150 return lvalue_p (TREE_OPERAND (ref, 0));
151
152 case TARGET_EXPR:
153 return 1;
154
155 case CALL_EXPR:
156 if (TREE_ADDRESSABLE (TREE_TYPE (ref)))
157 return 1;
158 break;
159
160 /* A currently unresolved scope ref. */
161 case SCOPE_REF:
162 my_friendly_abort (103);
163 case OFFSET_REF:
164 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
165 return 1;
166 return lvalue_p (TREE_OPERAND (ref, 0))
167 && lvalue_p (TREE_OPERAND (ref, 1));
168 break;
169
170 case COND_EXPR:
171 return (lvalue_p (TREE_OPERAND (ref, 1))
172 && lvalue_p (TREE_OPERAND (ref, 2)));
173
174 case MODIFY_EXPR:
175 return 1;
176
177 case COMPOUND_EXPR:
178 return lvalue_p (TREE_OPERAND (ref, 1));
179
180 case MAX_EXPR:
181 case MIN_EXPR:
182 return (lvalue_p (TREE_OPERAND (ref, 0))
183 && lvalue_p (TREE_OPERAND (ref, 1)));
184 }
185
186 return 0;
187 }
188
189 /* Return nonzero if REF is an lvalue valid for this language;
190 otherwise, print an error message and return zero. */
191
192 int
193 lvalue_or_else (ref, string)
194 tree ref;
195 char *string;
196 {
197 int win = lvalue_p (ref);
198 if (! win)
199 error ("non-lvalue in %s", string);
200 return win;
201 }
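
/* Illustrative use of the above (hypothetical caller, not from this file):

     lvalue_or_else (arg, "assignment");

   returns nonzero when ARG is an lvalue, and otherwise reports
   "non-lvalue in assignment" and returns zero.  */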
202
203 /* INIT is a CALL_EXPR which needs info about its target.
204 TYPE is the type that this initialization should appear to have.
205
206 Build an encapsulation of the initialization to perform
207 and return it so that it can be processed by language-independent
208 and language-specific expression expanders.
209
210 If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
211 Otherwise, cleanups are not built here. For example, when building
212 an initialization for a stack slot, the called function handles
213 the cleanup, so we would not want to build it here. */
214 tree
215 build_cplus_new (type, init, with_cleanup_p)
216 tree type;
217 tree init;
218 int with_cleanup_p;
219 {
220 tree slot = build (VAR_DECL, type);
221 tree rval = build (NEW_EXPR, type,
222 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
223 TREE_SIDE_EFFECTS (rval) = 1;
224 TREE_ADDRESSABLE (rval) = 1;
225 rval = build (TARGET_EXPR, type, slot, rval, 0);
226 TREE_SIDE_EFFECTS (rval) = 1;
227 TREE_ADDRESSABLE (rval) = 1;
228
229 #if 0
230 if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
231 {
232 TREE_OPERAND (rval, 2) = error_mark_node;
233 rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
234 build_delete (TYPE_POINTER_TO (type),
235 build_unary_op (ADDR_EXPR, slot, 0),
236 integer_two_node,
237 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0));
238 TREE_SIDE_EFFECTS (rval) = 1;
239 TREE_ADDRESSABLE (rval) = 1;
240 }
241 #endif
242 return rval;
243 }
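
/* Illustrative sketch of what build_cplus_new produces: for a call `f ()'
   whose result initializes an object of type T, the tree is roughly

     TARGET_EXPR <slot, NEW_EXPR <f, args, slot>>

   where SLOT is the VAR_DECL made above for the temporary.  */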
244
245 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
246 these CALL_EXPRs with tree nodes that will perform the cleanups. */
247
248 tree
249 break_out_cleanups (exp)
250 tree exp;
251 {
252 tree tmp = exp;
253
254 if (TREE_CODE (tmp) == CALL_EXPR
255 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
256 return build_cplus_new (TREE_TYPE (tmp), tmp, 1);
257
258 while (TREE_CODE (tmp) == NOP_EXPR
259 || TREE_CODE (tmp) == CONVERT_EXPR
260 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
261 {
262 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
263 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
264 {
265 TREE_OPERAND (tmp, 0)
266 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
267 TREE_OPERAND (tmp, 0), 1);
268 break;
269 }
270 else
271 tmp = TREE_OPERAND (tmp, 0);
272 }
273 return exp;
274 }
275
276 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
277 copies where they are found. Returns a deep copy of all nodes transitively
278 containing CALL_EXPRs. */
279
280 tree
281 break_out_calls (exp)
282 tree exp;
283 {
284 register tree t1, t2;
285 register enum tree_code code;
286 register int changed = 0;
287 register int i;
288
289 if (exp == NULL_TREE)
290 return exp;
291
292 code = TREE_CODE (exp);
293
294 if (code == CALL_EXPR)
295 return copy_node (exp);
296
297 /* Don't try to defeat a save_expr, as it should only be done once. */
298 if (code == SAVE_EXPR)
299 return exp;
300
301 switch (TREE_CODE_CLASS (code))
302 {
303 default:
304 abort ();
305
306 case 'c': /* a constant */
307 case 't': /* a type node */
308 case 'x': /* something random, like an identifier or an ERROR_MARK. */
309 return exp;
310
311 case 'd': /* A decl node */
312 #if 0 /* This is bogus. jason 9/21/94 */
313
314 t1 = break_out_calls (DECL_INITIAL (exp));
315 if (t1 != DECL_INITIAL (exp))
316 {
317 exp = copy_node (exp);
318 DECL_INITIAL (exp) = t1;
319 }
320 #endif
321 return exp;
322
323 case 'b': /* A block node */
324 {
325 /* Don't know how to handle these correctly yet. Must do a
326 break_out_calls on all DECL_INITIAL values for local variables,
327 and also break_out_calls on all sub-blocks and sub-statements. */
328 abort ();
329 }
330 return exp;
331
332 case 'e': /* an expression */
333 case 'r': /* a reference */
334 case 's': /* an expression with side effects */
335 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
336 {
337 t1 = break_out_calls (TREE_OPERAND (exp, i));
338 if (t1 != TREE_OPERAND (exp, i))
339 {
340 exp = copy_node (exp);
341 TREE_OPERAND (exp, i) = t1;
342 }
343 }
344 return exp;
345
346 case '<': /* a comparison expression */
347 case '2': /* a binary arithmetic expression */
348 t2 = break_out_calls (TREE_OPERAND (exp, 1));
349 if (t2 != TREE_OPERAND (exp, 1))
350 changed = 1;
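/* Fall through so operand 0 is processed as well.  */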
351 case '1': /* a unary arithmetic expression */
352 t1 = break_out_calls (TREE_OPERAND (exp, 0));
353 if (t1 != TREE_OPERAND (exp, 0))
354 changed = 1;
355 if (changed)
356 {
357 if (tree_code_length[(int) code] == 1)
358 return build1 (code, TREE_TYPE (exp), t1);
359 else
360 return build (code, TREE_TYPE (exp), t1, t2);
361 }
362 return exp;
363 }
364
365 }
366 \f
367 extern struct obstack *current_obstack;
368 extern struct obstack permanent_obstack, class_obstack;
369 extern struct obstack *saveable_obstack;
370
371 /* Here is how primitive or already-canonicalized types' hash
372 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
373 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
374
375 /* Construct, lay out, and return the type of methods belonging to class
376 BASETYPE whose arguments are described by ARGTYPES and whose values
377 are described by RETTYPE. If such a type exists already, reuse it. */
378 tree
379 build_cplus_method_type (basetype, rettype, argtypes)
380 tree basetype, rettype, argtypes;
381 {
382 register tree t;
383 tree ptype;
384 int hashcode;
385
386 /* Make a node of the sort we want. */
387 t = make_node (METHOD_TYPE);
388
389 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
390 TREE_TYPE (t) = rettype;
391 if (IS_SIGNATURE (basetype))
392 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
393 TYPE_READONLY (basetype),
394 TYPE_VOLATILE (basetype));
395 else
396 {
397 ptype = build_pointer_type (basetype);
398 ptype = build_type_variant (ptype, 1, 0);
399 }
400 /* The actual arglist for this function includes a "hidden" argument
401 which is "this". Put it into the list of argument types. */
402
403 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
404 TYPE_ARG_TYPES (t) = argtypes;
405 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
406
407 /* If we already have such a type, use the old one and free this one.
408 Note that it also frees up the above cons cell if found. */
409 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
410 t = type_hash_canon (hashcode, t);
411
412 if (TYPE_SIZE (t) == 0)
413 layout_type (t);
414
415 return t;
416 }
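
/* Illustrative sketch (hypothetical caller; the node names are assumed):
   to build the type of `int C::f (double)', where C_TYPE is the
   RECORD_TYPE for C, one might write

     tree args = tree_cons (NULL_TREE, double_type_node, void_list_node);
     tree mtype = build_cplus_method_type (c_type, integer_type_node, args);

   The hidden `this' argument (a constant pointer to C) is prepended by
   build_cplus_method_type itself, so it does not appear in ARGS.  */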
417
418 tree
419 build_cplus_staticfn_type (basetype, rettype, argtypes)
420 tree basetype, rettype, argtypes;
421 {
422 register tree t;
423 int hashcode;
424
425 /* Make a node of the sort we want. */
426 t = make_node (FUNCTION_TYPE);
427
428 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
429 TREE_TYPE (t) = rettype;
430
431 TYPE_ARG_TYPES (t) = argtypes;
432
433 /* If we already have such a type, use the old one and free this one.
434 Note that it also frees up the above cons cell if found. */
435 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
436 t = type_hash_canon (hashcode, t);
437
438 if (TYPE_SIZE (t) == 0)
439 layout_type (t);
440
441 return t;
442 }
443
444 tree
445 build_cplus_array_type (elt_type, index_type)
446 tree elt_type;
447 tree index_type;
448 {
449 register struct obstack *ambient_obstack = current_obstack;
450 register struct obstack *ambient_saveable_obstack = saveable_obstack;
451 tree t;
452
453 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
454 make this permanent too. */
455 if (TREE_PERMANENT (elt_type)
456 && (index_type == 0 || TREE_PERMANENT (index_type)))
457 {
458 current_obstack = &permanent_obstack;
459 saveable_obstack = &permanent_obstack;
460 }
461
462 t = build_array_type (elt_type, index_type);
463
464 /* Push these needs up so that initialization takes place
465 more easily. */
466 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
467 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
468 current_obstack = ambient_obstack;
469 saveable_obstack = ambient_saveable_obstack;
470 return t;
471 }
472 \f
473 /* Make a variant type in the proper way for C/C++, propagating qualifiers
474 down to the element type of an array. */
475
476 tree
477 cp_build_type_variant (type, constp, volatilep)
478 tree type;
479 int constp, volatilep;
480 {
481 if (TREE_CODE (type) == ARRAY_TYPE)
482 {
483 tree real_main_variant = TYPE_MAIN_VARIANT (type);
484
485 push_obstacks (TYPE_OBSTACK (real_main_variant),
486 TYPE_OBSTACK (real_main_variant));
487 type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
488 constp, volatilep),
489 TYPE_DOMAIN (type));
490
491 /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
492 make a copy. (TYPE might have come from the hash table and
493 REAL_MAIN_VARIANT might be in some function's obstack.) */
494
495 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
496 {
497 type = copy_node (type);
498 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
499 }
500
501 TYPE_MAIN_VARIANT (type) = real_main_variant;
502 pop_obstacks ();
503 }
504 return build_type_variant (type, constp, volatilep);
505 }
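
/* Illustrative sketch: given ATYPE, the ARRAY_TYPE for `int[10]',

     cp_build_type_variant (atype, 1, 0)

   yields the type of `const int[10]'; the const qualifier is pushed down
   onto the element type, unlike with plain build_type_variant.  */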
506 \f
507 /* Add OFFSET to all base types of BINFO.
508
509 OFFSET, which is a type offset, is a number of bytes.
510
511 Note that we don't have to worry about having two paths to the
512 same base type, since this type owns its association list. */
513 void
514 propagate_binfo_offsets (binfo, offset)
515 tree binfo;
516 tree offset;
517 {
518 tree binfos = BINFO_BASETYPES (binfo);
519 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
520
521 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
522 {
523 tree base_binfo = TREE_VEC_ELT (binfos, i);
524
525 if (TREE_VIA_VIRTUAL (base_binfo))
526 i += 1;
527 else
528 {
529 int j;
530 tree base_binfos = BINFO_BASETYPES (base_binfo);
531 tree delta;
532
533 for (j = i+1; j < n_baselinks; j++)
534 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
535 {
536 /* The next basetype offset must take into account the space
537 between the classes, not just the size of each class. */
538 delta = size_binop (MINUS_EXPR,
539 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
540 BINFO_OFFSET (base_binfo));
541 break;
542 }
543
544 #if 0
545 if (BINFO_OFFSET_ZEROP (base_binfo))
546 BINFO_OFFSET (base_binfo) = offset;
547 else
548 BINFO_OFFSET (base_binfo)
549 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
550 #else
551 BINFO_OFFSET (base_binfo) = offset;
552 #endif
553 if (base_binfos)
554 {
555 int k;
556 tree chain = NULL_TREE;
557
558 /* Now unshare the structure beneath BASE_BINFO. */
559 for (k = TREE_VEC_LENGTH (base_binfos)-1;
560 k >= 0; k--)
561 {
562 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
563 if (! TREE_VIA_VIRTUAL (base_base_binfo))
564 TREE_VEC_ELT (base_binfos, k)
565 = make_binfo (BINFO_OFFSET (base_base_binfo),
566 base_base_binfo,
567 BINFO_VTABLE (base_base_binfo),
568 BINFO_VIRTUALS (base_base_binfo),
569 chain);
570 chain = TREE_VEC_ELT (base_binfos, k);
571 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
572 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
573 BINFO_INHERITANCE_CHAIN (chain) = base_binfo;
574 }
575 /* Now propagate the offset to the base types. */
576 propagate_binfo_offsets (base_binfo, offset);
577 }
578
579 /* Go to our next class that counts for offset propagation. */
580 i = j;
581 if (i < n_baselinks)
582 offset = size_binop (PLUS_EXPR, offset, delta);
583 }
584 }
585 }
586
587 /* Compute the actual offsets that our virtual base classes
588 will have *for this type*. This must be performed after
589 the fields are laid out, since virtual baseclasses must be
590 laid out at the end of the record.
591
592 Returns the maximum number of virtual functions any of the virtual
593 baseclasses provides. */
594 int
595 layout_vbasetypes (rec, max)
596 tree rec;
597 int max;
598 {
599 /* Get all the virtual base types that this type uses.
600 The TREE_VALUE slot holds the virtual baseclass type. */
601 tree vbase_types = get_vbase_types (rec);
602
603 #ifdef STRUCTURE_SIZE_BOUNDARY
604 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
605 #else
606 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
607 #endif
608 int desired_align;
609
610 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
611 where CONST_SIZE is an integer
612 and VAR_SIZE is a tree expression.
613 If VAR_SIZE is null, the size is just CONST_SIZE.
614 Naturally we try to avoid using VAR_SIZE. */
615 register unsigned const_size = 0;
616 register tree var_size = 0;
617 int nonvirtual_const_size;
618 tree nonvirtual_var_size;
619
620 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
621
622 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
623 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
624 else
625 var_size = TYPE_SIZE (rec);
626
627 nonvirtual_const_size = const_size;
628 nonvirtual_var_size = var_size;
629
630 while (vbase_types)
631 {
632 tree basetype = BINFO_TYPE (vbase_types);
633 tree offset;
634
635 desired_align = TYPE_ALIGN (basetype);
636 record_align = MAX (record_align, desired_align);
637
638 if (const_size == 0)
639 offset = integer_zero_node;
640 else
641 {
642 /* Give each virtual base type the alignment it wants. */
643 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
644 * TYPE_ALIGN (basetype);
645 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
646 }
647
648 if (CLASSTYPE_VSIZE (basetype) > max)
649 max = CLASSTYPE_VSIZE (basetype);
650 BINFO_OFFSET (vbase_types) = offset;
651
652 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
653 {
654 /* Every virtual baseclass takes at least a UNIT, so that we can
655 take its address and get something different for each base. */
656 const_size += MAX (BITS_PER_UNIT,
657 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
658 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
659 }
660 else if (var_size == 0)
661 var_size = TYPE_SIZE (basetype);
662 else
663 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
664
665 vbase_types = TREE_CHAIN (vbase_types);
666 }
667
668 if (const_size)
669 {
670 /* Because a virtual base might take a single byte above,
671 we have to re-adjust the total size to make sure it is
672 a multiple of the alignment. */
673 /* Give the whole object the alignment it wants. */
674 const_size = CEIL (const_size, record_align) * record_align;
675 }
676
677 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
678 here, as that is for this class, without any virtual base classes. */
679 TYPE_ALIGN (rec) = record_align;
680 if (const_size != nonvirtual_const_size)
681 {
682 CLASSTYPE_VBASE_SIZE (rec)
683 = size_int (const_size - nonvirtual_const_size);
684 TYPE_SIZE (rec) = size_int (const_size);
685 }
686
687 /* Now propagate offset information throughout the lattice
688 under the vbase type. */
689 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
690 vbase_types = TREE_CHAIN (vbase_types))
691 {
692 tree base_binfos = BINFO_BASETYPES (vbase_types);
693
694 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
695
696 if (base_binfos)
697 {
698 tree chain = NULL_TREE;
699 int j;
700 /* Now unshare the structure beneath BASE_BINFO. */
701
702 for (j = TREE_VEC_LENGTH (base_binfos)-1;
703 j >= 0; j--)
704 {
705 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
706 if (! TREE_VIA_VIRTUAL (base_base_binfo))
707 TREE_VEC_ELT (base_binfos, j)
708 = make_binfo (BINFO_OFFSET (base_base_binfo),
709 base_base_binfo,
710 BINFO_VTABLE (base_base_binfo),
711 BINFO_VIRTUALS (base_base_binfo),
712 chain);
713 chain = TREE_VEC_ELT (base_binfos, j);
714 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
715 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
716 BINFO_INHERITANCE_CHAIN (chain) = vbase_types;
717 }
718
719 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
720 }
721 }
722
723 return max;
724 }
725
726 /* Lay out the base types of a record type, REC.
727 Tentatively set the size and alignment of REC
728 according to the base types alone.
729
730 Offsets for immediate nonvirtual baseclasses are also computed here.
731
732 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
733 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
734
735 Returns the list of virtual base classes as a FIELD_DECL chain. */
736 tree
737 layout_basetypes (rec, binfos)
738 tree rec, binfos;
739 {
740 /* Chain to hold all the new FIELD_DECLs which point at virtual
741 base classes. */
742 tree vbase_decls = NULL_TREE;
743
744 #ifdef STRUCTURE_SIZE_BOUNDARY
745 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
746 #else
747 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
748 #endif
749
750 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
751 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
752 the size is just CONST_SIZE. Naturally we try to avoid using
753 VAR_SIZE. And so far, we've been successful. */
754 #if 0
755 register tree var_size = 0;
756 #endif
757
758 register unsigned const_size = 0;
759 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
760
761 /* Handle basetypes almost like fields, but record their
762 offsets differently. */
763
764 for (i = 0; i < n_baseclasses; i++)
765 {
766 int inc, desired_align, int_vbase_size;
767 register tree base_binfo = TREE_VEC_ELT (binfos, i);
768 register tree basetype = BINFO_TYPE (base_binfo);
769 tree decl, offset;
770
771 if (TYPE_SIZE (basetype) == 0)
772 {
773 #if 0
774 /* This error is now reported in xref_tag, thus giving better
775 location information. */
776 error_with_aggr_type (base_binfo,
777 "base class `%s' has incomplete type");
778
779 TREE_VIA_PUBLIC (base_binfo) = 1;
780 TREE_VIA_PROTECTED (base_binfo) = 0;
781 TREE_VIA_VIRTUAL (base_binfo) = 0;
782
783 /* Should handle this better so that
784
785 class A;
786 class B: private A { virtual void F(); };
787
788 does not dump core when compiled. */
789 my_friendly_abort (121);
790 #endif
791 continue;
792 }
793
794 /* All basetypes are recorded in the association list of the
795 derived type. */
796
797 if (TREE_VIA_VIRTUAL (base_binfo))
798 {
799 int j;
800 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
801 + sizeof (VBASE_NAME) + 1);
802
803 /* The offset for a virtual base class is only used in computing
804 virtual function tables and for initializing virtual base
805 pointers. It is built once `get_vbase_types' is called. */
806
807 /* If this basetype can come from another vbase pointer
808 without an additional indirection, we will share
809 that pointer. If an indirection is involved, we
810 make our own pointer. */
811 for (j = 0; j < n_baseclasses; j++)
812 {
813 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
814 if (! TREE_VIA_VIRTUAL (other_base_binfo)
815 && binfo_member (basetype,
816 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
817 goto got_it;
818 }
819 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
820 decl = build_lang_decl (FIELD_DECL, get_identifier (name),
821 build_pointer_type (basetype));
822 /* If you change any of the below, take a look at all the
823 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
824 them too. */
825 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
826 DECL_VIRTUAL_P (decl) = 1;
827 DECL_FIELD_CONTEXT (decl) = rec;
828 DECL_CLASS_CONTEXT (decl) = rec;
829 DECL_FCONTEXT (decl) = basetype;
830 DECL_SAVED_INSNS (decl) = NULL_RTX;
831 DECL_FIELD_SIZE (decl) = 0;
832 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
833 TREE_CHAIN (decl) = vbase_decls;
834 BINFO_VPTR_FIELD (base_binfo) = decl;
835 vbase_decls = decl;
836
837 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
838 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
839 {
840 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
841 "destructor `%s' non-virtual");
842 warning ("in inheritance relationship `%s: virtual %s'",
843 TYPE_NAME_STRING (rec),
844 TYPE_NAME_STRING (basetype));
845 }
846 got_it:
847 /* The space this decl occupies has already been accounted for. */
848 continue;
849 }
850
851 if (const_size == 0)
852 offset = integer_zero_node;
853 else
854 {
855 /* Give each base type the alignment it wants. */
856 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
857 * TYPE_ALIGN (basetype);
858 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
859
860 #if 0
861 /* bpk: Disabled this check until someone is willing to
862 claim it as theirs and explain exactly what circumstances
863 warrant the warning. */
864 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
865 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
866 {
867 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
868 "destructor `%s' non-virtual");
869 warning ("in inheritance relationship `%s:%s %s'",
870 TYPE_NAME_STRING (rec),
871 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
872 TYPE_NAME_STRING (basetype));
873 }
874 #endif
875 }
876 BINFO_OFFSET (base_binfo) = offset;
877 if (CLASSTYPE_VSIZE (basetype))
878 {
879 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
880 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
881 }
882 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
883 TYPE_BINFO (rec) = base_binfo;
884
885 /* Add only the amount of storage not present in
886 the virtual baseclasses. */
887
888 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
889 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
890 {
891 inc = MAX (record_align,
892 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
893 - int_vbase_size));
894
895 /* Record must have at least as much alignment as any field. */
896 desired_align = TYPE_ALIGN (basetype);
897 record_align = MAX (record_align, desired_align);
898
899 const_size += inc;
900 }
901 }
902
903 if (const_size)
904 CLASSTYPE_SIZE (rec) = size_int (const_size);
905 else
906 CLASSTYPE_SIZE (rec) = integer_zero_node;
907 CLASSTYPE_ALIGN (rec) = record_align;
908
909 return vbase_decls;
910 }
911 \f
912 /* Hashing of lists so that we don't make duplicates.
913 The entry point is `list_hash_canon'. */
914
915 /* Each hash table slot is a bucket containing a chain
916 of these structures. */
917
918 struct list_hash
919 {
920 struct list_hash *next; /* Next structure in the bucket. */
921 int hashcode; /* Hash code of this list. */
922 tree list; /* The list recorded here. */
923 };
924
925 /* Now here is the hash table. When recording a list, it is added
926 to the slot whose index is the hash code mod the table size.
927 Note that the hash table is used for several kinds of lists.
928 While all these live in the same table, they are completely independent,
929 and the hash code is computed differently for each of these. */
930
931 #define TYPE_HASH_SIZE 59
932 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
933
934 /* Compute a hash code for a list (chain of TREE_LIST nodes
935 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
936 TREE_COMMON slots), by adding the hash codes of the individual entries. */
937
938 int
939 list_hash (list)
940 tree list;
941 {
942 register int hashcode = 0;
943
944 if (TREE_CHAIN (list))
945 hashcode += TYPE_HASH (TREE_CHAIN (list));
946
947 if (TREE_VALUE (list))
948 hashcode += TYPE_HASH (TREE_VALUE (list));
949 else
950 hashcode += 1007;
951 if (TREE_PURPOSE (list))
952 hashcode += TYPE_HASH (TREE_PURPOSE (list));
953 else
954 hashcode += 1009;
955 return hashcode;
956 }
957
958 /* Look in the list hash table for a list identical to LIST,
959 with hash code HASHCODE. If one is found, return it. Otherwise return 0. */
960
961 tree
962 list_hash_lookup (hashcode, list)
963 int hashcode;
964 tree list;
965 {
966 register struct list_hash *h;
967 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
968 if (h->hashcode == hashcode
969 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
970 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
971 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
972 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
973 && TREE_VALUE (h->list) == TREE_VALUE (list)
974 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
975 {
976 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
977 return h->list;
978 }
979 return 0;
980 }
981
982 /* Add an entry to the list-hash-table
983 for a list LIST whose hash code is HASHCODE. */
984
985 void
986 list_hash_add (hashcode, list)
987 int hashcode;
988 tree list;
989 {
990 register struct list_hash *h;
991
992 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
993 h->hashcode = hashcode;
994 h->list = list;
995 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
996 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
997 }
998
999 /* Given LIST, and HASHCODE its hash code, return the canonical
1000 object for an identical list if one already exists.
1001 Otherwise, return LIST, and record it as the canonical object
1002 if it is a permanent object.
1003
1004 To use this function, first create a list of the sort you want.
1005 Then compute its hash code from the fields of the list that
1006 make it different from other similar lists.
1007 Then call this function and use the value.
1008 This function frees the list you pass in if it is a duplicate. */
1009
1010 /* Set to 1 to debug without canonicalization. Never set by program. */
1011 static int debug_no_list_hash = 0;
1012
1013 tree
1014 list_hash_canon (hashcode, list)
1015 int hashcode;
1016 tree list;
1017 {
1018 tree t1;
1019
1020 if (debug_no_list_hash)
1021 return list;
1022
1023 t1 = list_hash_lookup (hashcode, list);
1024 if (t1 != 0)
1025 {
1026 obstack_free (&class_obstack, list);
1027 return t1;
1028 }
1029
1030 /* If this is a new list, record it for later reuse. */
1031 list_hash_add (hashcode, list);
1032
1033 return list;
1034 }
1035
1036 tree
1037 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
1038 int via_public, via_virtual, via_protected;
1039 tree purpose, value, chain;
1040 {
1041 struct obstack *ambient_obstack = current_obstack;
1042 tree t;
1043 int hashcode;
1044
1045 current_obstack = &class_obstack;
1046 t = tree_cons (purpose, value, chain);
1047 TREE_VIA_PUBLIC (t) = via_public;
1048 TREE_VIA_PROTECTED (t) = via_protected;
1049 TREE_VIA_VIRTUAL (t) = via_virtual;
1050 hashcode = list_hash (t);
1051 t = list_hash_canon (hashcode, t);
1052 current_obstack = ambient_obstack;
1053 return t;
1054 }
1055
1056 /* Constructor for hashed lists. */
1057 tree
1058 hash_tree_chain (value, chain)
1059 tree value, chain;
1060 {
1061 struct obstack *ambient_obstack = current_obstack;
1062 tree t;
1063 int hashcode;
1064
1065 current_obstack = &class_obstack;
1066 t = tree_cons (NULL_TREE, value, chain);
1067 hashcode = list_hash (t);
1068 t = list_hash_canon (hashcode, t);
1069 current_obstack = ambient_obstack;
1070 return t;
1071 }
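
/* Illustrative sketch: because these lists go through list_hash_canon,
   two calls with identical arguments, e.g.

     tree a = hash_tree_chain (value, chain);
     tree b = hash_tree_chain (value, chain);

   yield the same node (a == b); the duplicate cons cell built by the
   second call is freed back to the class obstack.  */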
1072
1073 /* Similar, but used for concatenating two lists. */
1074 tree
1075 hash_chainon (list1, list2)
1076 tree list1, list2;
1077 {
1078 if (list2 == 0)
1079 return list1;
1080 if (list1 == 0)
1081 return list2;
1082 if (TREE_CHAIN (list1) == NULL_TREE)
1083 return hash_tree_chain (TREE_VALUE (list1), list2);
1084 return hash_tree_chain (TREE_VALUE (list1),
1085 hash_chainon (TREE_CHAIN (list1), list2));
1086 }
1087
1088 static tree
1089 get_identifier_list (value)
1090 tree value;
1091 {
1092 tree list = IDENTIFIER_AS_LIST (value);
1093 if (list != NULL_TREE
1094 && (TREE_CODE (list) != TREE_LIST
1095 || TREE_VALUE (list) != value))
1096 list = NULL_TREE;
1097 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
1098 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
1099 && IDENTIFIER_TYPE_VALUE (value)
1100 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
1101 {
1102 tree type = IDENTIFIER_TYPE_VALUE (value);
1103
1104 if (TYPE_PTRMEMFUNC_P (type))
1105 list = NULL_TREE;
1106 else if (type == current_class_type)
1107 /* Don't mess up the constructor name. */
1108 list = tree_cons (NULL_TREE, value, NULL_TREE);
1109 else
1110 {
1111 register tree id;
1112 /* This will return the correct thing for regular types,
1113 nested types, and templates. Yay! */
1114 if (TYPE_NESTED_NAME (type))
1115 id = TYPE_NESTED_NAME (type);
1116 else
1117 id = TYPE_IDENTIFIER (type);
1118
1119 if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
1120 CLASSTYPE_ID_AS_LIST (type)
1121 = perm_tree_cons (NULL_TREE, id, NULL_TREE);
1122 list = CLASSTYPE_ID_AS_LIST (type);
1123 }
1124 }
1125 return list;
1126 }
1127
1128 tree
1129 get_decl_list (value)
1130 tree value;
1131 {
1132 tree list = NULL_TREE;
1133
1134 if (TREE_CODE (value) == IDENTIFIER_NODE)
1135 list = get_identifier_list (value);
1136 else if (TREE_CODE (value) == RECORD_TYPE
1137 && TYPE_LANG_SPECIFIC (value))
1138 list = CLASSTYPE_AS_LIST (value);
1139
1140 if (list != NULL_TREE)
1141 {
1142 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1143 return list;
1144 }
1145
1146 return build_decl_list (NULL_TREE, value);
1147 }
1148
1149 /* Look in the list hash table for a list equivalent to
1150 `build_tree_list (NULL_TREE, VALUE)'.
1151 If one is found, return it. Otherwise, build one, record it, and return it. */
1152
1153 tree
1154 list_hash_lookup_or_cons (value)
1155 tree value;
1156 {
1157 register int hashcode = TYPE_HASH (value);
1158 register struct list_hash *h;
1159 struct obstack *ambient_obstack;
1160 tree list = NULL_TREE;
1161
1162 if (TREE_CODE (value) == IDENTIFIER_NODE)
1163 list = get_identifier_list (value);
1164 else if (TREE_CODE (value) == TYPE_DECL
1165 && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
1166 && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
1167 list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
1168 else if (TREE_CODE (value) == RECORD_TYPE
1169 && TYPE_LANG_SPECIFIC (value))
1170 list = CLASSTYPE_AS_LIST (value);
1171
1172 if (list != NULL_TREE)
1173 {
1174 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
1175 return list;
1176 }
1177
1178 if (debug_no_list_hash)
1179 return hash_tree_chain (value, NULL_TREE);
1180
1181 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1182 if (h->hashcode == hashcode
1183 && TREE_VIA_VIRTUAL (h->list) == 0
1184 && TREE_VIA_PUBLIC (h->list) == 0
1185 && TREE_VIA_PROTECTED (h->list) == 0
1186 && TREE_PURPOSE (h->list) == 0
1187 && TREE_VALUE (h->list) == value)
1188 {
1189 my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
1190 my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
1191 return h->list;
1192 }
1193
1194 ambient_obstack = current_obstack;
1195 current_obstack = &class_obstack;
1196 list = build_tree_list (NULL_TREE, value);
1197 list_hash_add (hashcode, list);
1198 current_obstack = ambient_obstack;
1199 return list;
1200 }
1201 \f
1202 /* Build an association between TYPE and some parameters:
1203
1204 OFFSET is the offset added to `this' to convert it to a pointer
1205 of type `TYPE *'
1206
1207 BINFO is the base binfo to use, if we are deriving from one. This
1208 is necessary, as we want specialized parent binfos from base
1209 classes, so that the VTABLE_NAMEs of bases are for the most derived
1210 type, instead of the simple type.
1211
1212 VTABLE is the virtual function table with which to initialize
1213 sub-objects of type TYPE.
1214
1215 VIRTUALS are the virtual functions sitting in VTABLE.
1216
1217 CHAIN are more associations we must retain. */
1218
1219 tree
1220 make_binfo (offset, binfo, vtable, virtuals, chain)
1221 tree offset, binfo;
1222 tree vtable, virtuals;
1223 tree chain;
1224 {
1225 tree new_binfo = make_tree_vec (6);
1226 tree type;
1227
1228 if (TREE_CODE (binfo) == TREE_VEC)
1229 type = BINFO_TYPE (binfo);
1230 else
1231 {
1232 type = binfo;
1233 binfo = TYPE_BINFO (binfo);
1234 }
1235
1236 TREE_CHAIN (new_binfo) = chain;
1237 if (chain)
1238 TREE_USED (new_binfo) = TREE_USED (chain);
1239
1240 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1241 BINFO_OFFSET (new_binfo) = offset;
1242 BINFO_VTABLE (new_binfo) = vtable;
1243 BINFO_VIRTUALS (new_binfo) = virtuals;
1244 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1245
1246 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1247 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1248 return new_binfo;
1249 }
1250
1251 /* Return the binfo value for ELEM in TYPE. */
1252
1253 tree
1254 binfo_value (elem, type)
1255 tree elem;
1256 tree type;
1257 {
1258 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1259 compiler_error ("base class `%s' ambiguous in binfo_value",
1260 TYPE_NAME_STRING (elem));
1261 if (elem == type)
1262 return TYPE_BINFO (type);
1263 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1264 return type;
1265 return get_binfo (elem, type, 0);
1266 }
1267
1268 tree
1269 reverse_path (path)
1270 tree path;
1271 {
1272 register tree prev = 0, tmp, next;
1273 for (tmp = path; tmp; tmp = next)
1274 {
1275 next = BINFO_INHERITANCE_CHAIN (tmp);
1276 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1277 prev = tmp;
1278 }
1279 return prev;
1280 }
1281
1282 tree
1283 virtual_member (elem, list)
1284 tree elem;
1285 tree list;
1286 {
1287 tree t;
1288 tree rval, nval;
1289
1290 for (t = list; t; t = TREE_CHAIN (t))
1291 if (elem == BINFO_TYPE (t))
1292 return t;
1293 rval = 0;
1294 for (t = list; t; t = TREE_CHAIN (t))
1295 {
1296 tree binfos = BINFO_BASETYPES (t);
1297 int i;
1298
1299 if (binfos != NULL_TREE)
1300 for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
1301 {
1302 nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
1303 if (nval)
1304 {
1305 if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
1306 my_friendly_abort (104);
1307 rval = nval;
1308 }
1309 }
1310 }
1311 return rval;
1312 }
1313
1314 void
1315 debug_binfo (elem)
1316 tree elem;
1317 {
1318 int i;
1319 tree virtuals;
1320
1321 fprintf (stderr, "type \"%s\"; offset = %d\n",
1322 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1323 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1324 fprintf (stderr, "vtable type:\n");
1325 debug_tree (BINFO_TYPE (elem));
1326 if (BINFO_VTABLE (elem))
1327 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1328 else
1329 fprintf (stderr, "no vtable decl yet\n");
1330 fprintf (stderr, "virtuals:\n");
1331 virtuals = BINFO_VIRTUALS (elem);
1332 if (virtuals != 0)
1333 {
1334 /* skip the rtti type descriptor entry */
1335 virtuals = TREE_CHAIN (virtuals);
1336 }
1337 i = 1;
1338 while (virtuals)
1339 {
1340 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1341 fprintf (stderr, "%s [%d =? %d]\n",
1342 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1343 i, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1344 virtuals = TREE_CHAIN (virtuals);
1345 i += 1;
1346 }
1347 }
1348
1349 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1350 We expect a null pointer to mark the end of the chain.
1351 This is the Lisp primitive `length'. */
1352
1353 int
1354 decl_list_length (t)
1355 tree t;
1356 {
1357 register tree tail;
1358 register int len = 0;
1359
1360 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1361 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1362 for (tail = t; tail; tail = DECL_CHAIN (tail))
1363 len++;
1364
1365 return len;
1366 }
1367
1368 int
1369 count_functions (t)
1370 tree t;
1371 {
1372 if (TREE_CODE (t) == FUNCTION_DECL)
1373 return 1;
1374 else if (TREE_CODE (t) == TREE_LIST)
1375 return decl_list_length (TREE_VALUE (t));
1376
1377 my_friendly_abort (359);
1378 return 0;
1379 }
1380
1381 /* Like value_member, but for DECL_CHAINs. */
1382 tree
1383 decl_value_member (elem, list)
1384 tree elem, list;
1385 {
1386 while (list)
1387 {
1388 if (elem == list)
1389 return list;
1390 list = DECL_CHAIN (list);
1391 }
1392 return NULL_TREE;
1393 }
1394
1395 int
1396 is_overloaded_fn (x)
1397 tree x;
1398 {
1399 if (TREE_CODE (x) == FUNCTION_DECL)
1400 return 1;
1401
1402 if (TREE_CODE (x) == TREE_LIST
1403 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1404 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1405 return 1;
1406
1407 return 0;
1408 }
1409
1410 int
1411 really_overloaded_fn (x)
1412 tree x;
1413 {
1414 if (TREE_CODE (x) == TREE_LIST
1415 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1416 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1417 return 1;
1418
1419 return 0;
1420 }
1421
1422 tree
1423 get_first_fn (from)
1424 tree from;
1425 {
1426 if (TREE_CODE (from) == FUNCTION_DECL)
1427 return from;
1428
1429 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1430
1431 return TREE_VALUE (from);
1432 }
1433
1434 tree
1435 fnaddr_from_vtable_entry (entry)
1436 tree entry;
1437 {
1438 if (flag_vtable_thunks)
1439 {
1440 tree func = entry;
1441 if (TREE_CODE (func) == ADDR_EXPR)
1442 func = TREE_OPERAND (func, 0);
1443 if (TREE_CODE (func) == THUNK_DECL)
1444 return DECL_INITIAL (func);
1445 else
1446 return entry;
1447 }
1448 else
1449 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
1450 }
1451
1452 void
1453 set_fnaddr_from_vtable_entry (entry, value)
1454 tree entry, value;
1455 {
1456 if (flag_vtable_thunks)
1457 abort ();
1458 else
1459 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value;
1460 }
1461
1462 tree
1463 function_arg_chain (t)
1464 tree t;
1465 {
1466 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1467 }
1468
1469 int
1470 promotes_to_aggr_type (t, code)
1471 tree t;
1472 enum tree_code code;
1473 {
1474 if (TREE_CODE (t) == code)
1475 t = TREE_TYPE (t);
1476 return IS_AGGR_TYPE (t);
1477 }
1478
1479 int
1480 is_aggr_type_2 (t1, t2)
1481 tree t1, t2;
1482 {
1483 if (TREE_CODE (t1) != TREE_CODE (t2))
1484 return 0;
1485 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1486 }
1487
1488 /* Give a message using types TYPE1 and TYPE2 as arguments.
1489 PFN is the function which will print the message;
1490 S is the format string for PFN to use. */
1491 void
1492 message_2_types (pfn, s, type1, type2)
1493 void (*pfn) ();
1494 char *s;
1495 tree type1, type2;
1496 {
1497 tree name1 = TYPE_NAME (type1);
1498 tree name2 = TYPE_NAME (type2);
1499 if (TREE_CODE (name1) == TYPE_DECL)
1500 name1 = DECL_NAME (name1);
1501 if (TREE_CODE (name2) == TYPE_DECL)
1502 name2 = DECL_NAME (name2);
1503 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1504 }
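
/* Illustrative use (the format string here is hypothetical):

     message_2_types (error, "cannot convert `%s' to `%s'", t1, t2);

   passes the two class names to `error' in place of the `%s's.  */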
1505 \f
1506 #define PRINT_RING_SIZE 4
1507
1508 char *
1509 lang_printable_name (decl)
1510 tree decl;
1511 {
1512 static tree decl_ring[PRINT_RING_SIZE];
1513 static char *print_ring[PRINT_RING_SIZE];
1514 static int ring_counter;
1515 int i;
1516
1517 /* Only cache functions. */
1518 if (TREE_CODE (decl) != FUNCTION_DECL
1519 || DECL_LANG_SPECIFIC (decl) == 0)
1520 return decl_as_string (decl, 1);
1521
1522 /* See if this print name is lying around. */
1523 for (i = 0; i < PRINT_RING_SIZE; i++)
1524 if (decl_ring[i] == decl)
1525 /* yes, so return it. */
1526 return print_ring[i];
1527
1528 if (++ring_counter == PRINT_RING_SIZE)
1529 ring_counter = 0;
1530
1531 if (current_function_decl != NULL_TREE)
1532 {
1533 if (decl_ring[ring_counter] == current_function_decl)
1534 ring_counter += 1;
1535 if (ring_counter == PRINT_RING_SIZE)
1536 ring_counter = 0;
1537 if (decl_ring[ring_counter] == current_function_decl)
1538 my_friendly_abort (106);
1539 }
1540
1541 if (print_ring[ring_counter])
1542 free (print_ring[ring_counter]);
1543
1544 {
1545 int print_ret_type_p
1546 = (!DECL_CONSTRUCTOR_P (decl)
1547 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1548
1549 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1550 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1551 strcpy (print_ring[ring_counter], name);
1552 decl_ring[ring_counter] = decl;
1553 }
1554 return print_ring[ring_counter];
1555 }
1556 \f
1557 /* Comparison function for sorting identifiers in RAISES lists.
1558 Note that because IDENTIFIER_NODEs are unique, we can sort
1559 them by address, saving an indirection. */
1560 static int
1561 id_cmp (p1, p2)
1562 tree *p1, *p2;
1563 {
1564 return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2);
1565 }
1566
1567 /* Build the FUNCTION_TYPE or METHOD_TYPE which may raise exceptions
1568 listed in RAISES. */
1569 tree
1570 build_exception_variant (ctype, type, raises)
1571 tree ctype, type;
1572 tree raises;
1573 {
1574 int i;
1575 tree v = TYPE_MAIN_VARIANT (type);
1576 tree t, t2, cname;
1577 tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
1578 int constp = TYPE_READONLY (type);
1579 int volatilep = TYPE_VOLATILE (type);
1580
1581 for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
1582 {
1583 if (TYPE_READONLY (v) != constp
1584 || TYPE_VOLATILE (v) != volatilep)
1585 continue;
1586
1587 /* @@ This should do set equality, not exact match. */
1588 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
1589 /* List of exceptions raised matches previously found list.
1590
1591 @@ Nice to free up storage used in consing up the
1592 @@ list of exceptions raised. */
1593 return v;
1594 }
1595
1596 /* Need to build a new variant. */
1597 v = copy_node (type);
1598 TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
1599 TYPE_NEXT_VARIANT (type) = v;
1600 if (raises && ! TREE_PERMANENT (raises))
1601 {
1602 push_obstacks_nochange ();
1603 end_temporary_allocation ();
1604 raises = copy_list (raises);
1605 pop_obstacks ();
1606 }
1607 TYPE_RAISES_EXCEPTIONS (v) = raises;
1608 return v;
1609 }
1610
1611 /* Subroutine of copy_to_permanent
1612
1613 Assuming T is a node built bottom-up, make it all exist on
1614 the permanent obstack, if it is not permanent already. */
1615 static tree
1616 make_deep_copy (t)
1617 tree t;
1618 {
1619 enum tree_code code;
1620
1621 if (t == NULL_TREE || TREE_PERMANENT (t))
1622 return t;
1623
1624 switch (code = TREE_CODE (t))
1625 {
1626 case ERROR_MARK:
1627 return error_mark_node;
1628
1629 case VAR_DECL:
1630 case FUNCTION_DECL:
1631 case CONST_DECL:
1632 break;
1633
1634 case PARM_DECL:
1635 {
1636 tree chain = TREE_CHAIN (t);
1637 t = copy_node (t);
1638 TREE_CHAIN (t) = make_deep_copy (chain);
1639 TREE_TYPE (t) = make_deep_copy (TREE_TYPE (t));
1640 DECL_INITIAL (t) = make_deep_copy (DECL_INITIAL (t));
1641 DECL_SIZE (t) = make_deep_copy (DECL_SIZE (t));
1642 return t;
1643 }
1644
1645 case TREE_LIST:
1646 {
1647 tree chain = TREE_CHAIN (t);
1648 t = copy_node (t);
1649 TREE_PURPOSE (t) = make_deep_copy (TREE_PURPOSE (t));
1650 TREE_VALUE (t) = make_deep_copy (TREE_VALUE (t));
1651 TREE_CHAIN (t) = make_deep_copy (chain);
1652 return t;
1653 }
1654
1655 case TREE_VEC:
1656 {
1657 int len = TREE_VEC_LENGTH (t);
1658
1659 t = copy_node (t);
1660 while (len--)
1661 TREE_VEC_ELT (t, len) = make_deep_copy (TREE_VEC_ELT (t, len));
1662 return t;
1663 }
1664
1665 case INTEGER_CST:
1666 case REAL_CST:
1667 case STRING_CST:
1668 return copy_node (t);
1669
1670 case COND_EXPR:
1671 case TARGET_EXPR:
1672 case NEW_EXPR:
1673 t = copy_node (t);
1674 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1675 TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
1676 TREE_OPERAND (t, 2) = make_deep_copy (TREE_OPERAND (t, 2));
1677 return t;
1678
1679 case SAVE_EXPR:
1680 t = copy_node (t);
1681 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1682 return t;
1683
1684 case MODIFY_EXPR:
1685 case PLUS_EXPR:
1686 case MINUS_EXPR:
1687 case MULT_EXPR:
1688 case TRUNC_DIV_EXPR:
1689 case TRUNC_MOD_EXPR:
1690 case MIN_EXPR:
1691 case MAX_EXPR:
1692 case LSHIFT_EXPR:
1693 case RSHIFT_EXPR:
1694 case BIT_IOR_EXPR:
1695 case BIT_XOR_EXPR:
1696 case BIT_AND_EXPR:
1697 case BIT_ANDTC_EXPR:
1698 case TRUTH_ANDIF_EXPR:
1699 case TRUTH_ORIF_EXPR:
1700 case LT_EXPR:
1701 case LE_EXPR:
1702 case GT_EXPR:
1703 case GE_EXPR:
1704 case EQ_EXPR:
1705 case NE_EXPR:
1706 case CEIL_DIV_EXPR:
1707 case FLOOR_DIV_EXPR:
1708 case ROUND_DIV_EXPR:
1709 case CEIL_MOD_EXPR:
1710 case FLOOR_MOD_EXPR:
1711 case ROUND_MOD_EXPR:
1712 case COMPOUND_EXPR:
1713 case PREDECREMENT_EXPR:
1714 case PREINCREMENT_EXPR:
1715 case POSTDECREMENT_EXPR:
1716 case POSTINCREMENT_EXPR:
1717 case CALL_EXPR:
1718 t = copy_node (t);
1719 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1720 TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
1721 return t;
1722
1723 case CONVERT_EXPR:
1724 case ADDR_EXPR:
1725 case INDIRECT_REF:
1726 case NEGATE_EXPR:
1727 case BIT_NOT_EXPR:
1728 case TRUTH_NOT_EXPR:
1729 case NOP_EXPR:
1730 case COMPONENT_REF:
1731 t = copy_node (t);
1732 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1733 return t;
1734
1735 case POINTER_TYPE:
1736 return build_pointer_type (make_deep_copy (TREE_TYPE (t)));
1737 case REFERENCE_TYPE:
1738 return build_reference_type (make_deep_copy (TREE_TYPE (t)));
1739 case FUNCTION_TYPE:
1740 return build_function_type (make_deep_copy (TREE_TYPE (t)),
1741 make_deep_copy (TYPE_ARG_TYPES (t)));
1742 case ARRAY_TYPE:
1743 return build_array_type (make_deep_copy (TREE_TYPE (t)),
1744 make_deep_copy (TYPE_DOMAIN (t)));
1745 case INTEGER_TYPE:
1746 return build_index_type (make_deep_copy (TYPE_MAX_VALUE (t)));
1747
1748 case OFFSET_TYPE:
1749 return build_offset_type (make_deep_copy (TYPE_OFFSET_BASETYPE (t)),
1750 make_deep_copy (TREE_TYPE (t)));
1751 case METHOD_TYPE:
1752 return build_method_type
1753 (make_deep_copy (TYPE_METHOD_BASETYPE (t)),
1754 build_function_type
1755 (make_deep_copy (TREE_TYPE (t)),
1756 make_deep_copy (TREE_CHAIN (TYPE_ARG_TYPES (t)))));
1757
1758 case RECORD_TYPE:
1759 if (TYPE_PTRMEMFUNC_P (t))
1760 return build_ptrmemfunc_type
1761 (make_deep_copy (TYPE_PTRMEMFUNC_FN_TYPE (t)));
1762 /* else fall through */
1763
1764 /* This list is incomplete, but should suffice for now.
1765 It is very important that `sorry' does not call
1766 `report_error_function'. That could cause an infinite loop. */
1767 default:
1768 sorry ("initializer contains unrecognized tree code");
1769 return error_mark_node;
1770
1771 }
1772 my_friendly_abort (107);
1773 /* NOTREACHED */
1774 return NULL_TREE;
1775 }
1776
1777 /* Assuming T is a node built bottom-up, make it all exist on
1778 the permanent obstack, if it is not permanent already. */
1779 tree
1780 copy_to_permanent (t)
1781 tree t;
1782 {
1783 register struct obstack *ambient_obstack = current_obstack;
1784 register struct obstack *ambient_saveable_obstack = saveable_obstack;
1785
1786 if (t == NULL_TREE || TREE_PERMANENT (t))
1787 return t;
1788
1789 saveable_obstack = &permanent_obstack;
1790 current_obstack = saveable_obstack;
1791
1792 t = make_deep_copy (t);
1793
1794 current_obstack = ambient_obstack;
1795 saveable_obstack = ambient_saveable_obstack;
1796
1797 return t;
1798 }
1799
1800 void
1801 print_lang_statistics ()
1802 {
1803 extern struct obstack maybepermanent_obstack;
1804 print_obstack_statistics ("class_obstack", &class_obstack);
1805 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1806 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1807 print_search_statistics ();
1808 print_class_statistics ();
1809 }
1810
1811 /* This is used by the `assert' macro. It is provided in libgcc.a,
1812 which `cc' doesn't know how to link. Note that the C++ front-end
1813 no longer actually uses the `assert' macro (instead, it calls
1814 my_friendly_assert). But all of the back-end files still need this. */
1815 void
1816 __eprintf (string, expression, line, filename)
1817 #ifdef __STDC__
1818 const char *string;
1819 const char *expression;
1820 unsigned line;
1821 const char *filename;
1822 #else
1823 char *string;
1824 char *expression;
1825 unsigned line;
1826 char *filename;
1827 #endif
1828 {
1829 fprintf (stderr, string, expression, line, filename);
1830 fflush (stderr);
1831 abort ();
1832 }
1833
1834 /* Return, as an INTEGER_CST node, the number of elements for
1835 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1836
1837 tree
1838 array_type_nelts_top (type)
1839 tree type;
1840 {
1841 return fold (build (PLUS_EXPR, sizetype,
1842 array_type_nelts (type),
1843 integer_one_node));
1844 }
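
/* For example, for the ARRAY_TYPE of `int[5]', array_type_nelts yields the
   highest index, 4, so array_type_nelts_top yields 5.  */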
1845
1846 /* Return, as an INTEGER_CST node, the number of elements for
1847 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1848 ARRAY_TYPEs that are clumped together. */
1849
1850 tree
1851 array_type_nelts_total (type)
1852 tree type;
1853 {
1854 tree sz = array_type_nelts_top (type);
1855 type = TREE_TYPE (type);
1856 while (TREE_CODE (type) == ARRAY_TYPE)
1857 {
1858 tree n = array_type_nelts_top (type);
1859 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1860 type = TREE_TYPE (type);
1861 }
1862 return sz;
1863 }
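
/* For example, for the ARRAY_TYPE of `int[2][3]', the top count is 2, the
   nested ARRAY_TYPE contributes 3, and the total is 2 * 3 = 6.  */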