gcc/cp/tree.c (45th Cygnus<->FSF merge)
1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20
21 #include "config.h"
22 #include <stdio.h>
23 #include "obstack.h"
24 #include "tree.h"
25 #include "cp-tree.h"
26 #include "flags.h"
27
28 #define CEIL(x,y) (((x) + (y) - 1) / (y))
29
30 /* Return nonzero if REF is an lvalue valid for this language.
31 Lvalues can be assigned, unless they have TREE_READONLY.
32 Lvalues can have their address taken, unless they have DECL_REGISTER. */
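/* Some illustrative cases (an assumption, not from the original source):
   with GNU C++, conditional, comma, and assignment expressions can
   themselves be assigned to when the operands that matter are lvalues,
   e.g. given `int a, b, c;' the switch below accepts all of

        (a < b ? a : b) = 0;        a COND_EXPR lvalue
        (a, b) = 1;                 a COMPOUND_EXPR lvalue
        (a = b) = c;                a MODIFY_EXPR lvalue   */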
33
34 int
35 lvalue_p (ref)
36 tree ref;
37 {
38 if (! language_lvalue_valid (ref))
39 return 0;
40
41 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
42 return 1;
43
44 if (ref == current_class_decl && flag_this_is_variable <= 0)
45 return 0;
46
47 switch (TREE_CODE (ref))
48 {
49 /* preincrements and predecrements are valid lvals, provided
50 what they refer to are valid lvals. */
51 case PREINCREMENT_EXPR:
52 case PREDECREMENT_EXPR:
53 case COMPONENT_REF:
54 case SAVE_EXPR:
55 return lvalue_p (TREE_OPERAND (ref, 0));
56
57 case STRING_CST:
58 return 1;
59
60 case VAR_DECL:
61 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
62 && DECL_LANG_SPECIFIC (ref)
63 && DECL_IN_AGGR_P (ref))
64 return 0;
65 case INDIRECT_REF:
66 case ARRAY_REF:
67 case PARM_DECL:
68 case RESULT_DECL:
69 case ERROR_MARK:
70 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
71 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
72 return 1;
73 break;
74
75 case WITH_CLEANUP_EXPR:
76 return lvalue_p (TREE_OPERAND (ref, 0));
77
78 case TARGET_EXPR:
79 return 1;
80
81 case CALL_EXPR:
82 if (TREE_ADDRESSABLE (TREE_TYPE (ref)))
83 return 1;
84 break;
85
86 /* A currently unresolved scope ref. */
87 case SCOPE_REF:
88 my_friendly_abort (103);
89 case OFFSET_REF:
90 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
91 return 1;
92 return lvalue_p (TREE_OPERAND (ref, 0))
93 && lvalue_p (TREE_OPERAND (ref, 1));
94 break;
95
96 case COND_EXPR:
97 return (lvalue_p (TREE_OPERAND (ref, 1))
98 && lvalue_p (TREE_OPERAND (ref, 2)));
99
100 case MODIFY_EXPR:
101 return 1;
102
103 case COMPOUND_EXPR:
104 return lvalue_p (TREE_OPERAND (ref, 1));
105 }
106
107 return 0;
108 }
109
110 /* Return nonzero if REF is an lvalue valid for this language;
111 otherwise, print an error message and return zero. */
112
113 int
114 lvalue_or_else (ref, string)
115 tree ref;
116 char *string;
117 {
118 int win = lvalue_p (ref);
119 if (! win)
120 error ("non-lvalue in %s", string);
121 return win;
122 }
123
124 /* INIT is a CALL_EXPR which needs info about its target.
125 TYPE is the type that this initialization should appear to have.
126
127 Build an encapsulation of the initialization to perform
128 and return it so that it can be processed by language-independent
129 and language-specific expression expanders.
130
131 If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
132 Otherwise, cleanups are not built here. For example, when building
133 an initialization for a stack slot, the called function handles the
134 cleanup, so we do not want to build one here as well. */
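/* An illustrative sketch (an assumption, not from the original source):
   for a declaration such as

        X x = f ();         where f returns an X by value

   the CALL_EXPR for `f ()' is passed here so that the returned object is
   constructed directly in the slot for `x'; the TARGET_EXPR built below
   carries the slot and the wrapped call for the expanders to combine.  */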
135 tree
136 build_cplus_new (type, init, with_cleanup_p)
137 tree type;
138 tree init;
139 int with_cleanup_p;
140 {
141 tree slot = build (VAR_DECL, type);
142 tree rval = build (NEW_EXPR, type,
143 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
144 TREE_SIDE_EFFECTS (rval) = 1;
145 TREE_ADDRESSABLE (rval) = 1;
146 rval = build (TARGET_EXPR, type, slot, rval, 0);
147 TREE_SIDE_EFFECTS (rval) = 1;
148 TREE_ADDRESSABLE (rval) = 1;
149
150 #if 0
151 if (with_cleanup_p && TYPE_NEEDS_DESTRUCTOR (type))
152 {
153 TREE_OPERAND (rval, 2) = error_mark_node;
154 rval = build (WITH_CLEANUP_EXPR, type, rval, 0,
155 build_delete (TYPE_POINTER_TO (type),
156 build_unary_op (ADDR_EXPR, slot, 0),
157 integer_two_node,
158 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0));
159 TREE_SIDE_EFFECTS (rval) = 1;
160 TREE_ADDRESSABLE (rval) = 1;
161 }
162 #endif
163 return rval;
164 }
165
166 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
167 these CALL_EXPRs with tree nodes that will perform the cleanups. */
168
169 tree
170 break_out_cleanups (exp)
171 tree exp;
172 {
173 tree tmp = exp;
174
175 if (TREE_CODE (tmp) == CALL_EXPR
176 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
177 return build_cplus_new (TREE_TYPE (tmp), tmp, 1);
178
179 while (TREE_CODE (tmp) == NOP_EXPR
180 || TREE_CODE (tmp) == CONVERT_EXPR
181 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
182 {
183 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
184 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
185 {
186 TREE_OPERAND (tmp, 0)
187 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
188 TREE_OPERAND (tmp, 0), 1);
189 break;
190 }
191 else
192 tmp = TREE_OPERAND (tmp, 0);
193 }
194 return exp;
195 }
196
197 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
198 copies where they are found. Returns a deep copy of all nodes transitively
199 containing CALL_EXPRs. */
200
201 tree
202 break_out_calls (exp)
203 tree exp;
204 {
205 register tree t1, t2;
206 register enum tree_code code;
207 register int changed = 0;
208 register int i;
209
210 if (exp == NULL_TREE)
211 return exp;
212
213 code = TREE_CODE (exp);
214
215 if (code == CALL_EXPR)
216 return copy_node (exp);
217
218 /* Don't try to defeat a save_expr, as it should only be evaluated once. */
219 if (code == SAVE_EXPR)
220 return exp;
221
222 switch (TREE_CODE_CLASS (code))
223 {
224 default:
225 abort ();
226
227 case 'c': /* a constant */
228 case 't': /* a type node */
229 case 'x': /* something random, like an identifier or an ERROR_MARK. */
230 return exp;
231
232 case 'd': /* A decl node */
233 t1 = break_out_calls (DECL_INITIAL (exp));
234 if (t1 != DECL_INITIAL (exp))
235 {
236 exp = copy_node (exp);
237 DECL_INITIAL (exp) = t1;
238 }
239 return exp;
240
241 case 'b': /* A block node */
242 {
243 /* Don't know how to handle these correctly yet. Must do a
244 break_out_calls on all DECL_INITIAL values for local variables,
245 and also break_out_calls on all sub-blocks and sub-statements. */
246 abort ();
247 }
248 return exp;
249
250 case 'e': /* an expression */
251 case 'r': /* a reference */
252 case 's': /* an expression with side effects */
253 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
254 {
255 t1 = break_out_calls (TREE_OPERAND (exp, i));
256 if (t1 != TREE_OPERAND (exp, i))
257 {
258 exp = copy_node (exp);
259 TREE_OPERAND (exp, i) = t1;
260 }
261 }
262 return exp;
263
264 case '<': /* a comparison expression */
265 case '2': /* a binary arithmetic expression */
266 t2 = break_out_calls (TREE_OPERAND (exp, 1));
267 if (t2 != TREE_OPERAND (exp, 1))
268 changed = 1;
269 case '1': /* a unary arithmetic expression */
270 t1 = break_out_calls (TREE_OPERAND (exp, 0));
271 if (t1 != TREE_OPERAND (exp, 0))
272 changed = 1;
273 if (changed)
274 {
275 if (tree_code_length[(int) code] == 1)
276 return build1 (code, TREE_TYPE (exp), t1);
277 else
278 return build (code, TREE_TYPE (exp), t1, t2);
279 }
280 return exp;
281 }
282
283 }
284 \f
285 extern struct obstack *current_obstack;
286 extern struct obstack permanent_obstack, class_obstack;
287 extern struct obstack *saveable_obstack;
288
289 /* Here is how primitive or already-canonicalized types' hash
290 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
291 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
292
293 /* Construct, lay out and return the type of methods belonging to class
294 BASETYPE, whose arguments are described by ARGTYPES and whose values
295 are described by RETTYPE. If such a type exists already, reuse it. */
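/* Worked example (an assumption, for illustration): for a member function
   declared as

        int X::f (int);

   the METHOD_TYPE built below has TYPE_METHOD_BASETYPE `X', return type
   `int', and an argument list whose first, hidden entry is the read-only
   pointer-to-`X' type used for `this', followed by `int'.  */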
296 tree
297 build_cplus_method_type (basetype, rettype, argtypes)
298 tree basetype, rettype, argtypes;
299 {
300 register tree t;
301 tree ptype;
302 int hashcode;
303
304 /* Make a node of the sort we want. */
305 t = make_node (METHOD_TYPE);
306
307 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
308 TREE_TYPE (t) = rettype;
309 if (IS_SIGNATURE (basetype))
310 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
311 TYPE_READONLY (basetype),
312 TYPE_VOLATILE (basetype));
313 else
314 {
315 ptype = build_pointer_type (basetype);
316 ptype = build_type_variant (ptype, 1, 0);
317 }
318 /* The actual arglist for this function includes a "hidden" argument
319 which is "this". Put it into the list of argument types. */
320
321 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
322 TYPE_ARG_TYPES (t) = argtypes;
323 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
324
325 /* If we already have such a type, use the old one and free this one.
326 Note that it also frees up the above cons cell if found. */
327 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
328 t = type_hash_canon (hashcode, t);
329
330 if (TYPE_SIZE (t) == 0)
331 layout_type (t);
332
333 return t;
334 }
335
336 tree
337 build_cplus_staticfn_type (basetype, rettype, argtypes)
338 tree basetype, rettype, argtypes;
339 {
340 register tree t;
341 int hashcode;
342
343 /* Make a node of the sort we want. */
344 t = make_node (FUNCTION_TYPE);
345
346 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
347 TREE_TYPE (t) = rettype;
348
349 TYPE_ARG_TYPES (t) = argtypes;
350
351 /* If we already have such a type, use the old one and free this one.
352 Note that it also frees up the above cons cell if found. */
353 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
354 t = type_hash_canon (hashcode, t);
355
356 if (TYPE_SIZE (t) == 0)
357 layout_type (t);
358
359 return t;
360 }
361
362 tree
363 build_cplus_array_type (elt_type, index_type)
364 tree elt_type;
365 tree index_type;
366 {
367 register struct obstack *ambient_obstack = current_obstack;
368 register struct obstack *ambient_saveable_obstack = saveable_obstack;
369 tree t;
370
371 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
372 make this permanent too. */
373 if (TREE_PERMANENT (elt_type)
374 && (index_type == 0 || TREE_PERMANENT (index_type)))
375 {
376 current_obstack = &permanent_obstack;
377 saveable_obstack = &permanent_obstack;
378 }
379
380 t = build_array_type (elt_type, index_type);
381
382 /* Push these needs up so that initialization takes place
383 more easily. */
384 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
385 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
386 current_obstack = ambient_obstack;
387 saveable_obstack = ambient_saveable_obstack;
388 return t;
389 }
390 \f
391 /* Add OFFSET to all base types of BINFO.
392
393 OFFSET, which is a type offset, is the number of bytes.
394
395 Note that we don't have to worry about having two paths to the
396 same base type, since this type owns its association list. */
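/* A small worked example (illustrative assumption):

        struct A { int a; };
        struct B { int b; };
        struct C : B { int c; };
        struct D : A, C { };

   Once C has been placed at a nonzero byte offset within D, this routine
   pushes that offset down to C's base B as well, so every binfo beneath C
   records its position relative to the start of D.  */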
397 void
398 propagate_binfo_offsets (binfo, offset)
399 tree binfo;
400 tree offset;
401 {
402 tree binfos = BINFO_BASETYPES (binfo);
403 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
404
405 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
406 {
407 tree base_binfo = TREE_VEC_ELT (binfos, i);
408
409 if (TREE_VIA_VIRTUAL (base_binfo))
410 i += 1;
411 else
412 {
413 int j;
414 tree base_binfos = BINFO_BASETYPES (base_binfo);
415 tree delta;
416
417 for (j = i+1; j < n_baselinks; j++)
418 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
419 {
420 /* The next basetype offset must take into account the space
421 between the classes, not just the size of each class. */
422 delta = size_binop (MINUS_EXPR,
423 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
424 BINFO_OFFSET (base_binfo));
425 break;
426 }
427
428 #if 0
429 if (BINFO_OFFSET_ZEROP (base_binfo))
430 BINFO_OFFSET (base_binfo) = offset;
431 else
432 BINFO_OFFSET (base_binfo)
433 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
434 #else
435 BINFO_OFFSET (base_binfo) = offset;
436 #endif
437 if (base_binfos)
438 {
439 int k;
440 tree chain = NULL_TREE;
441
442 /* Now unshare the structure beneath BASE_BINFO. */
443 for (k = TREE_VEC_LENGTH (base_binfos)-1;
444 k >= 0; k--)
445 {
446 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
447 if (! TREE_VIA_VIRTUAL (base_base_binfo))
448 TREE_VEC_ELT (base_binfos, k)
449 = make_binfo (BINFO_OFFSET (base_base_binfo),
450 base_base_binfo,
451 BINFO_VTABLE (base_base_binfo),
452 BINFO_VIRTUALS (base_base_binfo),
453 chain);
454 chain = TREE_VEC_ELT (base_binfos, k);
455 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
456 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
457 }
458 /* Now propagate the offset to the base types. */
459 propagate_binfo_offsets (base_binfo, offset);
460 }
461
462 /* Go to our next class that counts for offset propagation. */
463 i = j;
464 if (i < n_baselinks)
465 offset = size_binop (PLUS_EXPR, offset, delta);
466 }
467 }
468 }
469
470 /* Compute the actual offsets that our virtual base classes
471 will have *for this type*. This must be performed after
472 the fields are laid out, since virtual baseclasses must
473 be laid out at the end of the record.
474
475 Returns the maximum number of virtual functions any of the virtual
476 baseclasses provide. */
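/* For illustration (an assumption, not from the original source): with

        struct V { int v; };
        struct A : virtual V { int a; };
        struct B : virtual V { int b; };
        struct C : A, B { int c; };

   the single shared V subobject is placed here, after all of C's nonvirtual
   data, and the BINFO_OFFSET of V's binfo is set to that final position.  */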
477 int
478 layout_vbasetypes (rec, max)
479 tree rec;
480 int max;
481 {
482 /* Get all the virtual base types that this type uses.
483 The TREE_VALUE slot holds the virtual baseclass type. */
484 tree vbase_types = get_vbase_types (rec);
485
486 #ifdef STRUCTURE_SIZE_BOUNDARY
487 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
488 #else
489 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
490 #endif
491 int desired_align;
492
493 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
494 where CONST_SIZE is an integer
495 and VAR_SIZE is a tree expression.
496 If VAR_SIZE is null, the size is just CONST_SIZE.
497 Naturally we try to avoid using VAR_SIZE. */
498 register unsigned const_size = 0;
499 register tree var_size = 0;
500 int nonvirtual_const_size;
501 tree nonvirtual_var_size;
502
503 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
504
505 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
506 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
507 else
508 var_size = TYPE_SIZE (rec);
509
510 nonvirtual_const_size = const_size;
511 nonvirtual_var_size = var_size;
512
513 while (vbase_types)
514 {
515 tree basetype = BINFO_TYPE (vbase_types);
516 tree offset;
517
518 desired_align = TYPE_ALIGN (basetype);
519 record_align = MAX (record_align, desired_align);
520
521 if (const_size == 0)
522 offset = integer_zero_node;
523 else
524 {
525 /* Give each virtual base type the alignment it wants. */
526 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
527 * TYPE_ALIGN (basetype);
528 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
529 }
530
531 if (CLASSTYPE_VSIZE (basetype) > max)
532 max = CLASSTYPE_VSIZE (basetype);
533 BINFO_OFFSET (vbase_types) = offset;
534
535 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
536 const_size += MAX (BITS_PER_UNIT,
537 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
538 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
539 else if (var_size == 0)
540 var_size = TYPE_SIZE (basetype);
541 else
542 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
543
544 vbase_types = TREE_CHAIN (vbase_types);
545 }
546
547 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
548 here, as that is for this class, without any virtual base classes. */
549 TYPE_ALIGN (rec) = record_align;
550 if (const_size != nonvirtual_const_size)
551 {
552 CLASSTYPE_VBASE_SIZE (rec)
553 = size_int (const_size - nonvirtual_const_size);
554 TYPE_SIZE (rec) = size_int (const_size);
555 }
556
557 /* Now propagate offset information throughout the lattice
558 under the vbase type. */
559 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
560 vbase_types = TREE_CHAIN (vbase_types))
561 {
562 tree base_binfos = BINFO_BASETYPES (vbase_types);
563
564 if (base_binfos)
565 {
566 tree chain = NULL_TREE;
567 int j;
568 /* Now unshare the structure beneath BASE_BINFO. */
569
570 for (j = TREE_VEC_LENGTH (base_binfos)-1;
571 j >= 0; j--)
572 {
573 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
574 if (! TREE_VIA_VIRTUAL (base_base_binfo))
575 TREE_VEC_ELT (base_binfos, j)
576 = make_binfo (BINFO_OFFSET (base_base_binfo),
577 base_base_binfo,
578 BINFO_VTABLE (base_base_binfo),
579 BINFO_VIRTUALS (base_base_binfo),
580 chain);
581 chain = TREE_VEC_ELT (base_binfos, j);
582 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
583 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
584 }
585
586 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
587 }
588 }
589
590 return max;
591 }
592
593 /* Lay out the base types of a record type, REC.
594 Tentatively set the size and alignment of REC
595 according to the base types alone.
596
597 Offsets for immediate nonvirtual baseclasses are also computed here.
598
599 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
600 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
601
602 Returns a chain of FIELD_DECLs which point at the virtual base classes. */
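/* Illustrative example (assumption):

        struct V { int v; };
        struct A : virtual V { int a; };
        struct B : A, virtual V { int b; };

   A gets a hidden FIELD_DECL pointing at its virtual base V.  When B's
   bases are laid out, the loop below notices that V is already reachable
   through A's virtual base pointer and shares it (the `got_it' case)
   rather than adding a second pointer field to B.  */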
603 tree
604 layout_basetypes (rec, binfos)
605 tree rec, binfos;
606 {
607 /* Chain to hold all the new FIELD_DECLs which point at virtual
608 base classes. */
609 tree vbase_decls = NULL_TREE;
610
611 #ifdef STRUCTURE_SIZE_BOUNDARY
612 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
613 #else
614 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
615 #endif
616
617 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
618 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
619 the size is just CONST_SIZE. Naturally we try to avoid using
620 VAR_SIZE. And so far, we've been successful. */
621 #if 0
622 register tree var_size = 0;
623 #endif
624
625 register unsigned const_size = 0;
626 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
627
628 /* Handle basetypes almost like fields, but record their
629 offsets differently. */
630
631 for (i = 0; i < n_baseclasses; i++)
632 {
633 int inc, desired_align, int_vbase_size;
634 register tree base_binfo = TREE_VEC_ELT (binfos, i);
635 register tree basetype = BINFO_TYPE (base_binfo);
636 tree decl, offset;
637
638 if (TYPE_SIZE (basetype) == 0)
639 {
640 #if 0
641 /* This error is now reported in xref_tag, thus giving better
642 location information. */
643 error_with_aggr_type (base_binfo,
644 "base class `%s' has incomplete type");
645
646 TREE_VIA_PUBLIC (base_binfo) = 1;
647 TREE_VIA_PROTECTED (base_binfo) = 0;
648 TREE_VIA_VIRTUAL (base_binfo) = 0;
649
650 /* Should handle this better so that
651
652 class A;
653 class B: private A { virtual void F(); };
654
655 does not dump core when compiled. */
656 my_friendly_abort (121);
657 #endif
658 continue;
659 }
660
661 /* All basetypes are recorded in the association list of the
662 derived type. */
663
664 if (TREE_VIA_VIRTUAL (base_binfo))
665 {
666 int j;
667 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
668 + sizeof (VBASE_NAME) + 1);
669
670 /* The offset for a virtual base class is only used in computing
671 virtual function tables and for initializing virtual base
672 pointers. It is built once `get_vbase_types' is called. */
673
674 /* If this basetype can come from another vbase pointer
675 without an additional indirection, we will share
676 that pointer. If an indirection is involved, we
677 make our own pointer. */
678 for (j = 0; j < n_baseclasses; j++)
679 {
680 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
681 if (! TREE_VIA_VIRTUAL (other_base_binfo)
682 && binfo_member (basetype,
683 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
684 goto got_it;
685 }
686 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
687 decl = build_lang_decl (FIELD_DECL, get_identifier (name),
688 build_pointer_type (basetype));
689 /* If you change any of the below, take a look at all the
690 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
691 them too. */
692 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
693 DECL_VIRTUAL_P (decl) = 1;
694 DECL_FIELD_CONTEXT (decl) = rec;
695 DECL_CLASS_CONTEXT (decl) = rec;
696 DECL_FCONTEXT (decl) = basetype;
697 DECL_FIELD_SIZE (decl) = 0;
698 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
699 TREE_CHAIN (decl) = vbase_decls;
700 BINFO_VPTR_FIELD (base_binfo) = decl;
701 vbase_decls = decl;
702
703 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
704 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
705 {
706 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
707 "destructor `%s' non-virtual");
708 warning ("in inheritance relationship `%s: virtual %s'",
709 TYPE_NAME_STRING (rec),
710 TYPE_NAME_STRING (basetype));
711 }
712 got_it:
713 /* The space this decl occupies has already been accounted for. */
714 continue;
715 }
716
717 if (const_size == 0)
718 offset = integer_zero_node;
719 else
720 {
721 /* Give each base type the alignment it wants. */
722 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
723 * TYPE_ALIGN (basetype);
724 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
725
726 #if 0
727 /* bpk: Disabled this check until someone is willing to
728 claim it as theirs and explain exactly what circumstances
729 warrant the warning. */
730 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
731 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0)) == NULL_TREE)
732 {
733 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 0),
734 "destructor `%s' non-virtual");
735 warning ("in inheritance relationship `%s:%s %s'",
736 TYPE_NAME_STRING (rec),
737 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
738 TYPE_NAME_STRING (basetype));
739 }
740 #endif
741 }
742 BINFO_OFFSET (base_binfo) = offset;
743 if (CLASSTYPE_VSIZE (basetype))
744 {
745 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
746 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
747 }
748 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
749 TYPE_BINFO (rec) = base_binfo;
750
751 /* Add only the amount of storage not present in
752 the virtual baseclasses. */
753
754 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
755 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
756 {
757 inc = MAX (record_align,
758 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
759 - int_vbase_size));
760
761 /* Record must have at least as much alignment as any field. */
762 desired_align = TYPE_ALIGN (basetype);
763 record_align = MAX (record_align, desired_align);
764
765 const_size += inc;
766 }
767 }
768
769 if (const_size)
770 CLASSTYPE_SIZE (rec) = size_int (const_size);
771 else
772 CLASSTYPE_SIZE (rec) = integer_zero_node;
773 CLASSTYPE_ALIGN (rec) = record_align;
774
775 return vbase_decls;
776 }
777 \f
778 /* Hashing of lists so that we don't make duplicates.
779 The entry point is `list_hash_canon'. */
780
781 /* Each hash table slot is a bucket containing a chain
782 of these structures. */
783
784 struct list_hash
785 {
786 struct list_hash *next; /* Next structure in the bucket. */
787 int hashcode; /* Hash code of this list. */
788 tree list; /* The list recorded here. */
789 };
790
791 /* Now here is the hash table. When recording a list, it is added
792 to the slot whose index is the hash code mod the table size.
793 Note that the hash table is used for several kinds of lists.
794 While all these live in the same table, they are completely independent,
795 and the hash code is computed differently for each of these. */
796
797 #define TYPE_HASH_SIZE 59
798 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
799
800 /* Compute a hash code for a list (chain of TREE_LIST nodes
801 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
802 TREE_COMMON slots), by adding the hash codes of the individual entries. */
803
804 int
805 list_hash (list)
806 tree list;
807 {
808 register int hashcode = 0;
809
810 if (TREE_CHAIN (list))
811 hashcode += TYPE_HASH (TREE_CHAIN (list));
812
813 if (TREE_VALUE (list))
814 hashcode += TYPE_HASH (TREE_VALUE (list));
815 else
816 hashcode += 1007;
817 if (TREE_PURPOSE (list))
818 hashcode += TYPE_HASH (TREE_PURPOSE (list));
819 else
820 hashcode += 1009;
821 return hashcode;
822 }
823
824 /* Look in the list hash table for a list isomorphic to LIST.
825 If one is found, return it. Otherwise return 0. */
826
827 tree
828 list_hash_lookup (hashcode, list)
829 int hashcode;
830 tree list;
831 {
832 register struct list_hash *h;
833 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
834 if (h->hashcode == hashcode
835 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
836 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
837 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
838 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
839 && TREE_VALUE (h->list) == TREE_VALUE (list)
840 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
841 {
842 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
843 return h->list;
844 }
845 return 0;
846 }
847
848 /* Add an entry to the list-hash-table
849 for a list LIST whose hash code is HASHCODE. */
850
851 void
852 list_hash_add (hashcode, list)
853 int hashcode;
854 tree list;
855 {
856 register struct list_hash *h;
857
858 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
859 h->hashcode = hashcode;
860 h->list = list;
861 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
862 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
863 }
864
865 /* Given LIST, and HASHCODE its hash code, return the canonical
866 object for an identical list if one already exists.
867 Otherwise, return LIST, and record it as the canonical object
868 if it is a permanent object.
869
870 To use this function, first create a list of the sort you want.
871 Then compute its hash code from the fields of the list that
872 make it different from other similar lists.
873 Then call this function and use the value.
874 This function frees the list you pass in if it is a duplicate. */
875
876 /* Set to 1 to debug without canonicalization. Never set by program. */
877 static int debug_no_list_hash = 0;
878
879 tree
880 list_hash_canon (hashcode, list)
881 int hashcode;
882 tree list;
883 {
884 tree t1;
885
886 if (debug_no_list_hash)
887 return list;
888
889 t1 = list_hash_lookup (hashcode, list);
890 if (t1 != 0)
891 {
892 obstack_free (&class_obstack, list);
893 return t1;
894 }
895
896 /* If this is a new list, record it for later reuse. */
897 list_hash_add (hashcode, list);
898
899 return list;
900 }
901
902 tree
903 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
904 int via_public, via_virtual, via_protected;
905 tree purpose, value, chain;
906 {
907 struct obstack *ambient_obstack = current_obstack;
908 tree t;
909 int hashcode;
910
911 current_obstack = &class_obstack;
912 t = tree_cons (purpose, value, chain);
913 TREE_VIA_PUBLIC (t) = via_public;
914 TREE_VIA_PROTECTED (t) = via_protected;
915 TREE_VIA_VIRTUAL (t) = via_virtual;
916 hashcode = list_hash (t);
917 t = list_hash_canon (hashcode, t);
918 current_obstack = ambient_obstack;
919 return t;
920 }
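/* Usage sketch (an illustrative assumption, not from the original source):
   building the same public, virtual entry twice for some tree node `elt'
   yields one canonical node:

        tree a = hash_tree_cons (1, 1, 0, NULL_TREE, elt, NULL_TREE);
        tree b = hash_tree_cons (1, 1, 0, NULL_TREE, elt, NULL_TREE);

   The second call hashes its freshly consed TREE_LIST, finds the first one
   through list_hash_canon, frees the duplicate back to class_obstack, and
   returns the original, so `a' and `b' end up being the same node.  */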
921
922 /* Constructor for hashed lists. */
923 tree
924 hash_tree_chain (value, chain)
925 tree value, chain;
926 {
927 struct obstack *ambient_obstack = current_obstack;
928 tree t;
929 int hashcode;
930
931 current_obstack = &class_obstack;
932 t = tree_cons (NULL_TREE, value, chain);
933 hashcode = list_hash (t);
934 t = list_hash_canon (hashcode, t);
935 current_obstack = ambient_obstack;
936 return t;
937 }
938
939 /* Similar, but used for concatenating two lists. */
940 tree
941 hash_chainon (list1, list2)
942 tree list1, list2;
943 {
944 if (list2 == 0)
945 return list1;
946 if (list1 == 0)
947 return list2;
948 if (TREE_CHAIN (list1) == NULL_TREE)
949 return hash_tree_chain (TREE_VALUE (list1), list2);
950 return hash_tree_chain (TREE_VALUE (list1),
951 hash_chainon (TREE_CHAIN (list1), list2));
952 }
953
954 static tree
955 get_identifier_list (value)
956 tree value;
957 {
958 tree list = IDENTIFIER_AS_LIST (value);
959 if (list != NULL_TREE
960 && (TREE_CODE (list) != TREE_LIST
961 || TREE_VALUE (list) != value))
962 list = NULL_TREE;
963 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
964 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
965 && IDENTIFIER_TYPE_VALUE (value)
966 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
967 {
968 tree type = IDENTIFIER_TYPE_VALUE (value);
969
970 if (TYPE_PTRMEMFUNC_P (type))
971 list = NULL_TREE;
972 else if (type == current_class_type)
973 /* Don't mess up the constructor name. */
974 list = tree_cons (NULL_TREE, value, NULL_TREE);
975 else
976 {
977 register tree id;
978 /* This will return the correct thing for regular types,
979 nested types, and templates. Yay! */
980 if (TYPE_NESTED_NAME (type))
981 id = TYPE_NESTED_NAME (type);
982 else
983 id = TYPE_IDENTIFIER (type);
984
985 if (CLASSTYPE_ID_AS_LIST (type) == NULL_TREE)
986 CLASSTYPE_ID_AS_LIST (type)
987 = perm_tree_cons (NULL_TREE, id, NULL_TREE);
988 list = CLASSTYPE_ID_AS_LIST (type);
989 }
990 }
991 return list;
992 }
993
994 tree
995 get_decl_list (value)
996 tree value;
997 {
998 tree list = NULL_TREE;
999
1000 if (TREE_CODE (value) == IDENTIFIER_NODE)
1001 list = get_identifier_list (value);
1002 else if (TREE_CODE (value) == RECORD_TYPE
1003 && TYPE_LANG_SPECIFIC (value))
1004 list = CLASSTYPE_AS_LIST (value);
1005
1006 if (list != NULL_TREE)
1007 {
1008 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1009 return list;
1010 }
1011
1012 return build_decl_list (NULL_TREE, value);
1013 }
1014
1015 /* Look in the list hash table for a list isomorphic to
1016 `build_tree_list (NULL_TREE, VALUE)'.
1017 If one is found, return it. Otherwise, build and record one. */
1018
1019 tree
1020 list_hash_lookup_or_cons (value)
1021 tree value;
1022 {
1023 register int hashcode = TYPE_HASH (value);
1024 register struct list_hash *h;
1025 struct obstack *ambient_obstack;
1026 tree list = NULL_TREE;
1027
1028 if (TREE_CODE (value) == IDENTIFIER_NODE)
1029 list = get_identifier_list (value);
1030 else if (TREE_CODE (value) == TYPE_DECL
1031 && TREE_CODE (TREE_TYPE (value)) == RECORD_TYPE
1032 && TYPE_LANG_SPECIFIC (TREE_TYPE (value)))
1033 list = CLASSTYPE_ID_AS_LIST (TREE_TYPE (value));
1034 else if (TREE_CODE (value) == RECORD_TYPE
1035 && TYPE_LANG_SPECIFIC (value))
1036 list = CLASSTYPE_AS_LIST (value);
1037
1038 if (list != NULL_TREE)
1039 {
1040 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 302);
1041 return list;
1042 }
1043
1044 if (debug_no_list_hash)
1045 return hash_tree_chain (value, NULL_TREE);
1046
1047 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1048 if (h->hashcode == hashcode
1049 && TREE_VIA_VIRTUAL (h->list) == 0
1050 && TREE_VIA_PUBLIC (h->list) == 0
1051 && TREE_VIA_PROTECTED (h->list) == 0
1052 && TREE_PURPOSE (h->list) == 0
1053 && TREE_VALUE (h->list) == value)
1054 {
1055 my_friendly_assert (TREE_TYPE (h->list) == 0, 303);
1056 my_friendly_assert (TREE_CHAIN (h->list) == 0, 304);
1057 return h->list;
1058 }
1059
1060 ambient_obstack = current_obstack;
1061 current_obstack = &class_obstack;
1062 list = build_tree_list (NULL_TREE, value);
1063 list_hash_add (hashcode, list);
1064 current_obstack = ambient_obstack;
1065 return list;
1066 }
1067 \f
1068 /* Build an association between TYPE and some parameters:
1069
1070 OFFSET is the offset added to `this' to convert it to a pointer
1071 of type `TYPE *'
1072
1073 BINFO is the base binfo to use, if we are deriving from one. This
1074 is necessary, as we want specialized parent binfos from base
1075 classes, so that the VTABLE_NAMEs of bases are for the most derived
1076 type, instead of the simple type.
1077
1078 VTABLE is the virtual function table with which to initialize
1079 sub-objects of type TYPE.
1080
1081 VIRTUALS are the virtual functions sitting in VTABLE.
1082
1083 CHAIN lists more associations we must retain. */
1084
1085 tree
1086 make_binfo (offset, binfo, vtable, virtuals, chain)
1087 tree offset, binfo;
1088 tree vtable, virtuals;
1089 tree chain;
1090 {
1091 tree new_binfo = make_tree_vec (6);
1092 tree type;
1093
1094 if (TREE_CODE (binfo) == TREE_VEC)
1095 type = BINFO_TYPE (binfo);
1096 else
1097 {
1098 type = binfo;
1099 binfo = TYPE_BINFO (binfo);
1100 }
1101
1102 TREE_CHAIN (new_binfo) = chain;
1103 if (chain)
1104 TREE_USED (new_binfo) = TREE_USED (chain);
1105
1106 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1107 BINFO_OFFSET (new_binfo) = offset;
1108 BINFO_VTABLE (new_binfo) = vtable;
1109 BINFO_VIRTUALS (new_binfo) = virtuals;
1110 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1111
1112 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1113 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1114 return new_binfo;
1115 }
1116
1117 tree
1118 copy_binfo (list)
1119 tree list;
1120 {
1121 tree binfo = copy_list (list);
1122 tree rval = binfo;
1123 while (binfo)
1124 {
1125 TREE_USED (binfo) = 0;
1126 if (BINFO_BASETYPES (binfo))
1127 BINFO_BASETYPES (binfo) = copy_node (BINFO_BASETYPES (binfo));
1128 binfo = TREE_CHAIN (binfo);
1129 }
1130 return rval;
1131 }
1132
1133 /* Return the binfo value for ELEM in TYPE. */
1134
1135 tree
1136 binfo_value (elem, type)
1137 tree elem;
1138 tree type;
1139 {
1140 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1141 compiler_error ("base class `%s' ambiguous in binfo_value",
1142 TYPE_NAME_STRING (elem));
1143 if (elem == type)
1144 return TYPE_BINFO (type);
1145 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1146 return type;
1147 return get_binfo (elem, type, 0);
1148 }
1149
1150 tree
1151 reverse_path (path)
1152 tree path;
1153 {
1154 register tree prev = 0, tmp, next;
1155 for (tmp = path; tmp; tmp = next)
1156 {
1157 next = BINFO_INHERITANCE_CHAIN (tmp);
1158 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1159 prev = tmp;
1160 }
1161 return prev;
1162 }
1163
1164 tree
1165 virtual_member (elem, list)
1166 tree elem;
1167 tree list;
1168 {
1169 tree t;
1170 tree rval, nval;
1171
1172 for (t = list; t; t = TREE_CHAIN (t))
1173 if (elem == BINFO_TYPE (t))
1174 return t;
1175 rval = 0;
1176 for (t = list; t; t = TREE_CHAIN (t))
1177 {
1178 tree binfos = BINFO_BASETYPES (t);
1179 int i;
1180
1181 if (binfos != NULL_TREE)
1182 for (i = TREE_VEC_LENGTH (binfos)-1; i >= 0; i--)
1183 {
1184 nval = binfo_value (elem, BINFO_TYPE (TREE_VEC_ELT (binfos, i)));
1185 if (nval)
1186 {
1187 if (rval && BINFO_OFFSET (nval) != BINFO_OFFSET (rval))
1188 my_friendly_abort (104);
1189 rval = nval;
1190 }
1191 }
1192 }
1193 return rval;
1194 }
1195
1196 void
1197 debug_binfo (elem)
1198 tree elem;
1199 {
1200 int i;
1201 tree virtuals;
1202
1203 fprintf (stderr, "type \"%s\"; offset = %d\n",
1204 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1205 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1206 fprintf (stderr, "vtable type:\n");
1207 debug_tree (BINFO_TYPE (elem));
1208 if (BINFO_VTABLE (elem))
1209 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1210 else
1211 fprintf (stderr, "no vtable decl yet\n");
1212 fprintf (stderr, "virtuals:\n");
1213 virtuals = BINFO_VIRTUALS (elem);
1214 if (virtuals != 0)
1215 {
1216 virtuals = TREE_CHAIN (virtuals);
1217 if (flag_dossier)
1218 virtuals = TREE_CHAIN (virtuals);
1219 }
1220 i = 1;
1221 while (virtuals)
1222 {
1223 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1224 fprintf (stderr, "%s [%d =? %d]\n",
1225 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1226 i, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1227 virtuals = TREE_CHAIN (virtuals);
1228 i += 1;
1229 }
1230 }
1231
1232 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1233 We expect a null pointer to mark the end of the chain.
1234 This is the Lisp primitive `length'. */
1235
1236 int
1237 decl_list_length (t)
1238 tree t;
1239 {
1240 register tree tail;
1241 register int len = 0;
1242
1243 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1244 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1245 for (tail = t; tail; tail = DECL_CHAIN (tail))
1246 len++;
1247
1248 return len;
1249 }
1250
1251 int
1252 count_functions (t)
1253 tree t;
1254 {
1255 if (TREE_CODE (t) == FUNCTION_DECL)
1256 return 1;
1257 else if (TREE_CODE (t) == TREE_LIST)
1258 return decl_list_length (TREE_VALUE (t));
1259
1260 my_friendly_abort (359);
1261 return 0;
1262 }
1263
1264 /* Like value_member, but for DECL_CHAINs. */
1265 tree
1266 decl_value_member (elem, list)
1267 tree elem, list;
1268 {
1269 while (list)
1270 {
1271 if (elem == list)
1272 return list;
1273 list = DECL_CHAIN (list);
1274 }
1275 return NULL_TREE;
1276 }
1277
1278 int
1279 is_overloaded_fn (x)
1280 tree x;
1281 {
1282 if (TREE_CODE (x) == FUNCTION_DECL)
1283 return 1;
1284
1285 if (TREE_CODE (x) == TREE_LIST
1286 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1287 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1288 return 1;
1289
1290 return 0;
1291 }
1292
1293 int
1294 really_overloaded_fn (x)
1295 tree x;
1296 {
1297 if (TREE_CODE (x) == TREE_LIST
1298 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1299 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1300 return 1;
1301
1302 return 0;
1303 }
1304
1305 tree
1306 get_first_fn (from)
1307 tree from;
1308 {
1309 if (TREE_CODE (from) == FUNCTION_DECL)
1310 return from;
1311
1312 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1313
1314 return TREE_VALUE (from);
1315 }
1316
1317 tree
1318 fnaddr_from_vtable_entry (entry)
1319 tree entry;
1320 {
1321 if (flag_vtable_thunks)
1322 {
1323 tree func = entry;
1324 if (TREE_CODE (func) == ADDR_EXPR)
1325 func = TREE_OPERAND (func, 0);
1326 if (TREE_CODE (func) == THUNK_DECL)
1327 return DECL_INITIAL (func);
1328 else
1329 return entry;
1330 }
1331 else
1332 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
1333 }
1334
1335 void
1336 set_fnaddr_from_vtable_entry (entry, value)
1337 tree entry, value;
1338 {
1339 if (flag_vtable_thunks)
1340 abort ();
1341 else
1342 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry)))) = value;
1343 }
1344
1345 tree
1346 function_arg_chain (t)
1347 tree t;
1348 {
1349 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1350 }
1351
1352 int
1353 promotes_to_aggr_type (t, code)
1354 tree t;
1355 enum tree_code code;
1356 {
1357 if (TREE_CODE (t) == code)
1358 t = TREE_TYPE (t);
1359 return IS_AGGR_TYPE (t);
1360 }
1361
1362 int
1363 is_aggr_type_2 (t1, t2)
1364 tree t1, t2;
1365 {
1366 if (TREE_CODE (t1) != TREE_CODE (t2))
1367 return 0;
1368 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1369 }
1370
1371 /* Give message using types TYPE1 and TYPE2 as arguments.
1372 PFN is the function which will print the message;
1373 S is the format string for PFN to use. */
1374 void
1375 message_2_types (pfn, s, type1, type2)
1376 void (*pfn) ();
1377 char *s;
1378 tree type1, type2;
1379 {
1380 tree name1 = TYPE_NAME (type1);
1381 tree name2 = TYPE_NAME (type2);
1382 if (TREE_CODE (name1) == TYPE_DECL)
1383 name1 = DECL_NAME (name1);
1384 if (TREE_CODE (name2) == TYPE_DECL)
1385 name2 = DECL_NAME (name2);
1386 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1387 }
1388 \f
1389 #define PRINT_RING_SIZE 4
1390
1391 char *
1392 lang_printable_name (decl)
1393 tree decl;
1394 {
1395 static tree decl_ring[PRINT_RING_SIZE];
1396 static char *print_ring[PRINT_RING_SIZE];
1397 static int ring_counter;
1398 int i;
1399
1400 /* Only cache functions. */
1401 if (TREE_CODE (decl) != FUNCTION_DECL
1402 || DECL_LANG_SPECIFIC (decl) == 0)
1403 return decl_as_string (decl, 1);
1404
1405 /* See if this print name is lying around. */
1406 for (i = 0; i < PRINT_RING_SIZE; i++)
1407 if (decl_ring[i] == decl)
1408 /* yes, so return it. */
1409 return print_ring[i];
1410
1411 if (++ring_counter == PRINT_RING_SIZE)
1412 ring_counter = 0;
1413
1414 if (current_function_decl != NULL_TREE)
1415 {
1416 if (decl_ring[ring_counter] == current_function_decl)
1417 ring_counter += 1;
1418 if (ring_counter == PRINT_RING_SIZE)
1419 ring_counter = 0;
1420 if (decl_ring[ring_counter] == current_function_decl)
1421 my_friendly_abort (106);
1422 }
1423
1424 if (print_ring[ring_counter])
1425 free (print_ring[ring_counter]);
1426
1427 {
1428 int print_ret_type_p
1429 = (!DECL_CONSTRUCTOR_P (decl)
1430 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1431
1432 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1433 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1434 strcpy (print_ring[ring_counter], name);
1435 decl_ring[ring_counter] = decl;
1436 }
1437 return print_ring[ring_counter];
1438 }
1439 \f
1440 /* Comparison function for sorting identifiers in RAISES lists.
1441 Note that because IDENTIFIER_NODEs are unique, we can sort
1442 them by address, saving an indirection. */
1443 static int
1444 id_cmp (p1, p2)
1445 tree *p1, *p2;
1446 {
1447 return (HOST_WIDE_INT)TREE_VALUE (*p1) - (HOST_WIDE_INT)TREE_VALUE (*p2);
1448 }
1449
1450 /* Build the FUNCTION_TYPE or METHOD_TYPE which may raise exceptions
1451 listed in RAISES. */
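/* Illustrative example (assumption): two declarations whose exception
   specifications name the same types, say

        void f () throw (X, Y);
        void g () throw (X, Y);

   should share one variant node: the loop below walks the existing
   variants of TYPE and returns the one whose TYPE_RAISES_EXCEPTIONS list
   matches RAISES, building a new variant only when no match is found.  */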
1452 tree
1453 build_exception_variant (ctype, type, raises)
1454 tree ctype, type;
1455 tree raises;
1456 {
1457 int i;
1458 tree v = TYPE_MAIN_VARIANT (type);
1459 tree t, t2, cname;
1460 tree *a = (tree *)alloca ((list_length (raises)+1) * sizeof (tree));
1461 int constp = TYPE_READONLY (type);
1462 int volatilep = TYPE_VOLATILE (type);
1463
1464 for (v = TYPE_NEXT_VARIANT (v); v; v = TYPE_NEXT_VARIANT (v))
1465 {
1466 if (TYPE_READONLY (v) != constp
1467 || TYPE_VOLATILE (v) != volatilep)
1468 continue;
1469
1470 t = raises;
1471 t2 = TYPE_RAISES_EXCEPTIONS (v);
1472 while (t && t2)
1473 {
1474 if (TREE_TYPE (t) == TREE_TYPE (t2))
1475 {
1476 t = TREE_CHAIN (t);
1477 t2 = TREE_CHAIN (t2);
1478 }
1479 else break;
1480 }
1481 if (t || t2)
1482 continue;
1483 /* List of exceptions raised matches previously found list.
1484
1485 @@ Nice to free up storage used in consing up the
1486 @@ list of exceptions raised. */
1487 return v;
1488 }
1489
1490 /* Need to build a new variant. */
1491 v = copy_node (type);
1492 TYPE_NEXT_VARIANT (v) = TYPE_NEXT_VARIANT (type);
1493 TYPE_NEXT_VARIANT (type) = v;
1494 if (raises && ! TREE_PERMANENT (raises))
1495 {
1496 push_obstacks_nochange ();
1497 end_temporary_allocation ();
1498 raises = copy_list (raises);
1499 pop_obstacks ();
1500 }
1501 TYPE_RAISES_EXCEPTIONS (v) = raises;
1502 return v;
1503 }
1504
1505 /* Subroutine of copy_to_permanent.
1506
1507 Assuming T is a node built bottom-up, make it all exist on
1508 the permanent obstack, if it is not permanent already. */
1509 static tree
1510 make_deep_copy (t)
1511 tree t;
1512 {
1513 enum tree_code code;
1514
1515 if (t == NULL_TREE || TREE_PERMANENT (t))
1516 return t;
1517
1518 switch (code = TREE_CODE (t))
1519 {
1520 case ERROR_MARK:
1521 return error_mark_node;
1522
1523 case VAR_DECL:
1524 case FUNCTION_DECL:
1525 case CONST_DECL:
1526 break;
1527
1528 case PARM_DECL:
1529 {
1530 tree chain = TREE_CHAIN (t);
1531 t = copy_node (t);
1532 TREE_CHAIN (t) = make_deep_copy (chain);
1533 TREE_TYPE (t) = make_deep_copy (TREE_TYPE (t));
1534 DECL_INITIAL (t) = make_deep_copy (DECL_INITIAL (t));
1535 DECL_SIZE (t) = make_deep_copy (DECL_SIZE (t));
1536 return t;
1537 }
1538
1539 case TREE_LIST:
1540 {
1541 tree chain = TREE_CHAIN (t);
1542 t = copy_node (t);
1543 TREE_PURPOSE (t) = make_deep_copy (TREE_PURPOSE (t));
1544 TREE_VALUE (t) = make_deep_copy (TREE_VALUE (t));
1545 TREE_CHAIN (t) = make_deep_copy (chain);
1546 return t;
1547 }
1548
1549 case TREE_VEC:
1550 {
1551 int len = TREE_VEC_LENGTH (t);
1552
1553 t = copy_node (t);
1554 while (len--)
1555 TREE_VEC_ELT (t, len) = make_deep_copy (TREE_VEC_ELT (t, len));
1556 return t;
1557 }
1558
1559 case INTEGER_CST:
1560 case REAL_CST:
1561 case STRING_CST:
1562 return copy_node (t);
1563
1564 case COND_EXPR:
1565 case TARGET_EXPR:
1566 case NEW_EXPR:
1567 t = copy_node (t);
1568 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1569 TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
1570 TREE_OPERAND (t, 2) = make_deep_copy (TREE_OPERAND (t, 2));
1571 return t;
1572
1573 case SAVE_EXPR:
1574 t = copy_node (t);
1575 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1576 return t;
1577
1578 case MODIFY_EXPR:
1579 case PLUS_EXPR:
1580 case MINUS_EXPR:
1581 case MULT_EXPR:
1582 case TRUNC_DIV_EXPR:
1583 case TRUNC_MOD_EXPR:
1584 case MIN_EXPR:
1585 case MAX_EXPR:
1586 case LSHIFT_EXPR:
1587 case RSHIFT_EXPR:
1588 case BIT_IOR_EXPR:
1589 case BIT_XOR_EXPR:
1590 case BIT_AND_EXPR:
1591 case BIT_ANDTC_EXPR:
1592 case TRUTH_ANDIF_EXPR:
1593 case TRUTH_ORIF_EXPR:
1594 case LT_EXPR:
1595 case LE_EXPR:
1596 case GT_EXPR:
1597 case GE_EXPR:
1598 case EQ_EXPR:
1599 case NE_EXPR:
1600 case CEIL_DIV_EXPR:
1601 case FLOOR_DIV_EXPR:
1602 case ROUND_DIV_EXPR:
1603 case CEIL_MOD_EXPR:
1604 case FLOOR_MOD_EXPR:
1605 case ROUND_MOD_EXPR:
1606 case COMPOUND_EXPR:
1607 case PREDECREMENT_EXPR:
1608 case PREINCREMENT_EXPR:
1609 case POSTDECREMENT_EXPR:
1610 case POSTINCREMENT_EXPR:
1611 case CALL_EXPR:
1612 t = copy_node (t);
1613 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1614 TREE_OPERAND (t, 1) = make_deep_copy (TREE_OPERAND (t, 1));
1615 return t;
1616
1617 case CONVERT_EXPR:
1618 case ADDR_EXPR:
1619 case INDIRECT_REF:
1620 case NEGATE_EXPR:
1621 case BIT_NOT_EXPR:
1622 case TRUTH_NOT_EXPR:
1623 case NOP_EXPR:
1624 case COMPONENT_REF:
1625 t = copy_node (t);
1626 TREE_OPERAND (t, 0) = make_deep_copy (TREE_OPERAND (t, 0));
1627 return t;
1628
1629 /* This list is incomplete, but should suffice for now.
1630 It is very important that `sorry' does not call
1631 `report_error_function'. That could cause an infinite loop. */
1632 default:
1633 sorry ("initializer contains unrecognized tree code");
1634 return error_mark_node;
1635
1636 }
1637 my_friendly_abort (107);
1638 /* NOTREACHED */
1639 return NULL_TREE;
1640 }
1641
1642 /* Assuming T is a node built bottom-up, make it all exist on
1643 the permanent obstack, if it is not permanent already. */
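/* Usage sketch (an assumption, for illustration): an expression built on a
   temporary obstack, such as an initializer that must outlive the current
   function, can be preserved with

        init = copy_to_permanent (init);

   which switches to the permanent obstack, deep-copies the tree through
   make_deep_copy, and then restores the previous obstacks.  */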
1644 tree
1645 copy_to_permanent (t)
1646 tree t;
1647 {
1648 register struct obstack *ambient_obstack = current_obstack;
1649 register struct obstack *ambient_saveable_obstack = saveable_obstack;
1650
1651 if (t == NULL_TREE || TREE_PERMANENT (t))
1652 return t;
1653
1654 saveable_obstack = &permanent_obstack;
1655 current_obstack = saveable_obstack;
1656
1657 t = make_deep_copy (t);
1658
1659 current_obstack = ambient_obstack;
1660 saveable_obstack = ambient_saveable_obstack;
1661
1662 return t;
1663 }
1664
1665 void
1666 print_lang_statistics ()
1667 {
1668 extern struct obstack maybepermanent_obstack;
1669 print_obstack_statistics ("class_obstack", &class_obstack);
1670 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1671 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1672 print_search_statistics ();
1673 print_class_statistics ();
1674 }
1675
1676 /* This is used by the `assert' macro. It is provided in libgcc.a,
1677 which `cc' doesn't know how to link. Note that the C++ front-end
1678 no longer actually uses the `assert' macro (instead, it calls
1679 my_friendly_assert). But all of the back-end files still need this. */
1680 void
1681 __eprintf (string, expression, line, filename)
1682 #ifdef __STDC__
1683 const char *string;
1684 const char *expression;
1685 unsigned line;
1686 const char *filename;
1687 #else
1688 char *string;
1689 char *expression;
1690 unsigned line;
1691 char *filename;
1692 #endif
1693 {
1694 fprintf (stderr, string, expression, line, filename);
1695 fflush (stderr);
1696 abort ();
1697 }
1698
1699 /* Return, as an INTEGER_CST node, the number of elements for
1700 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1701
1702 tree
1703 array_type_nelts_top (type)
1704 tree type;
1705 {
1706 return fold (build (PLUS_EXPR, integer_type_node,
1707 array_type_nelts (type),
1708 integer_one_node));
1709 }
1710
1711 /* Return, as an INTEGER_CST node, the number of elements for
1712 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1713 ARRAY_TYPEs that are clumped together. */
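/* Worked example (assumption): for `int a[3][4][5]', array_type_nelts_top
   gives 3 for the outermost dimension, and this routine also folds in the
   inner dimensions, yielding the constant 60.  */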
1714
1715 tree
1716 array_type_nelts_total (type)
1717 tree type;
1718 {
1719 tree sz = array_type_nelts_top (type);
1720 type = TREE_TYPE (type);
1721 while (TREE_CODE (type) == ARRAY_TYPE)
1722 {
1723 tree n = array_type_nelts_top (type);
1724 sz = fold (build (MULT_EXPR, integer_type_node, sz, n));
1725 type = TREE_TYPE (type);
1726 }
1727 return sz;
1728 }