1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
28 #define CEIL(x,y) (((x) + (y) - 1) / (y))
30 /* Return nonzero if REF is an lvalue valid for this language.
31 Lvalues can be assigned, unless they have TREE_READONLY.
32 Lvalues can have their address taken, unless they have DECL_REGISTER. */
38 if (! language_lvalue_valid (ref
))
41 if (TREE_CODE (TREE_TYPE (ref
)) == REFERENCE_TYPE
)
44 if (ref
== current_class_decl
&& flag_this_is_variable
<= 0)
47 switch (TREE_CODE (ref
))
49 /* preincrements and predecrements are valid lvals, provided
50 what they refer to are valid lvals. */
51 case PREINCREMENT_EXPR
:
52 case PREDECREMENT_EXPR
:
55 return lvalue_p (TREE_OPERAND (ref
, 0));
61 if (TREE_READONLY (ref
) && ! TREE_STATIC (ref
)
62 && DECL_LANG_SPECIFIC (ref
)
63 && DECL_IN_AGGR_P (ref
))
70 if (TREE_CODE (TREE_TYPE (ref
)) != FUNCTION_TYPE
71 && TREE_CODE (TREE_TYPE (ref
)) != METHOD_TYPE
)
75 case WITH_CLEANUP_EXPR
:
76 return lvalue_p (TREE_OPERAND (ref
, 0));
82 if (TREE_ADDRESSABLE (TREE_TYPE (ref
)))
86 /* A currently unresolved scope ref. */
88 my_friendly_abort (103);
90 if (TREE_CODE (TREE_OPERAND (ref
, 1)) == FUNCTION_DECL
)
92 return lvalue_p (TREE_OPERAND (ref
, 0))
93 && lvalue_p (TREE_OPERAND (ref
, 1));
97 return (lvalue_p (TREE_OPERAND (ref
, 1))
98 && lvalue_p (TREE_OPERAND (ref
, 2)));
104 return lvalue_p (TREE_OPERAND (ref
, 1));
110 /* Return nonzero if REF is an lvalue valid for this language;
111 otherwise, print an error message and return zero. */
114 lvalue_or_else (ref
, string
)
118 int win
= lvalue_p (ref
);
120 error ("non-lvalue in %s", string
);
124 /* INIT is a CALL_EXPR which needs info about its target.
125 TYPE is the type that this initialization should appear to have.
127 Build an encapsulation of the initialization to perform
128 and return it so that it can be processed by language-independent
129 and language-specific expression expanders.
131 If WITH_CLEANUP_P is nonzero, we build a cleanup for this expression.
132 Otherwise, cleanups are not built here. For example, when building
133 an initialization for a stack slot, since the called function handles
134 the cleanup, we would not want to do it here. */
136 build_cplus_new (type
, init
, with_cleanup_p
)
141 tree slot
= build (VAR_DECL
, type
);
142 tree rval
= build (NEW_EXPR
, type
,
143 TREE_OPERAND (init
, 0), TREE_OPERAND (init
, 1), slot
);
144 TREE_SIDE_EFFECTS (rval
) = 1;
145 TREE_ADDRESSABLE (rval
) = 1;
146 rval
= build (TARGET_EXPR
, type
, slot
, rval
, 0);
147 TREE_SIDE_EFFECTS (rval
) = 1;
148 TREE_ADDRESSABLE (rval
) = 1;
151 if (with_cleanup_p
&& TYPE_NEEDS_DESTRUCTOR (type
))
153 TREE_OPERAND (rval
, 2) = error_mark_node
;
154 rval
= build (WITH_CLEANUP_EXPR
, type
, rval
, 0,
155 build_delete (TYPE_POINTER_TO (type
),
156 build_unary_op (ADDR_EXPR
, slot
, 0),
158 LOOKUP_NORMAL
|LOOKUP_DESTRUCTOR
, 0));
159 TREE_SIDE_EFFECTS (rval
) = 1;
160 TREE_ADDRESSABLE (rval
) = 1;
166 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
167 these CALL_EXPRs with tree nodes that will perform the cleanups. */
170 break_out_cleanups (exp
)
175 if (TREE_CODE (tmp
) == CALL_EXPR
176 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp
)))
177 return build_cplus_new (TREE_TYPE (tmp
), tmp
, 1);
179 while (TREE_CODE (tmp
) == NOP_EXPR
180 || TREE_CODE (tmp
) == CONVERT_EXPR
181 || TREE_CODE (tmp
) == NON_LVALUE_EXPR
)
183 if (TREE_CODE (TREE_OPERAND (tmp
, 0)) == CALL_EXPR
184 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp
, 0))))
186 TREE_OPERAND (tmp
, 0)
187 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp
, 0)),
188 TREE_OPERAND (tmp
, 0), 1);
192 tmp
= TREE_OPERAND (tmp
, 0);
197 /* Recursively perform a preorder search EXP for CALL_EXPRs, making
198 copies where they are found. Returns a deep copy all nodes transitively
199 containing CALL_EXPRs. */
202 break_out_calls (exp
)
205 register tree t1
, t2
;
206 register enum tree_code code
;
207 register int changed
= 0;
210 if (exp
== NULL_TREE
)
213 code
= TREE_CODE (exp
);
215 if (code
== CALL_EXPR
)
216 return copy_node (exp
);
218 /* Don't try and defeat a save_expr, as it should only be done once. */
219 if (code
== SAVE_EXPR
)
222 switch (TREE_CODE_CLASS (code
))
227 case 'c': /* a constant */
228 case 't': /* a type node */
229 case 'x': /* something random, like an identifier or an ERROR_MARK. */
232 case 'd': /* A decl node */
233 #if 0 /* This is bogus. jason 9/21/94 */
235 t1
= break_out_calls (DECL_INITIAL (exp
));
236 if (t1
!= DECL_INITIAL (exp
))
238 exp
= copy_node (exp
);
239 DECL_INITIAL (exp
) = t1
;
244 case 'b': /* A block node */
246 /* Don't know how to handle these correctly yet. Must do a
247 break_out_calls on all DECL_INITIAL values for local variables,
248 and also break_out_calls on all sub-blocks and sub-statements. */
253 case 'e': /* an expression */
254 case 'r': /* a reference */
255 case 's': /* an expression with side effects */
256 for (i
= tree_code_length
[(int) code
] - 1; i
>= 0; i
--)
258 t1
= break_out_calls (TREE_OPERAND (exp
, i
));
259 if (t1
!= TREE_OPERAND (exp
, i
))
261 exp
= copy_node (exp
);
262 TREE_OPERAND (exp
, i
) = t1
;
267 case '<': /* a comparison expression */
268 case '2': /* a binary arithmetic expression */
269 t2
= break_out_calls (TREE_OPERAND (exp
, 1));
270 if (t2
!= TREE_OPERAND (exp
, 1))
272 case '1': /* a unary arithmetic expression */
273 t1
= break_out_calls (TREE_OPERAND (exp
, 0));
274 if (t1
!= TREE_OPERAND (exp
, 0))
278 if (tree_code_length
[(int) code
] == 1)
279 return build1 (code
, TREE_TYPE (exp
), t1
);
281 return build (code
, TREE_TYPE (exp
), t1
, t2
);
288 extern struct obstack
*current_obstack
;
289 extern struct obstack permanent_obstack
, class_obstack
;
290 extern struct obstack
*saveable_obstack
;
292 /* Here is how primitive or already-canonicalized types' hash
293 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
294 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
296 /* Construct, lay out and return the type of methods belonging to class
297 BASETYPE and whose arguments are described by ARGTYPES and whose values
298 are described by RETTYPE. If each type exists already, reuse it. */
300 build_cplus_method_type (basetype
, rettype
, argtypes
)
301 tree basetype
, rettype
, argtypes
;
307 /* Make a node of the sort we want. */
308 t
= make_node (METHOD_TYPE
);
310 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
311 TREE_TYPE (t
) = rettype
;
312 if (IS_SIGNATURE (basetype
))
313 ptype
= build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype
),
314 TYPE_READONLY (basetype
),
315 TYPE_VOLATILE (basetype
));
318 ptype
= build_pointer_type (basetype
);
319 ptype
= build_type_variant (ptype
, 1, 0);
321 /* The actual arglist for this function includes a "hidden" argument
322 which is "this". Put it into the list of argument types. */
324 argtypes
= tree_cons (NULL_TREE
, ptype
, argtypes
);
325 TYPE_ARG_TYPES (t
) = argtypes
;
326 TREE_SIDE_EFFECTS (argtypes
) = 1; /* Mark first argtype as "artificial". */
328 /* If we already have such a type, use the old one and free this one.
329 Note that it also frees up the above cons cell if found. */
330 hashcode
= TYPE_HASH (basetype
) + TYPE_HASH (rettype
) + type_hash_list (argtypes
);
331 t
= type_hash_canon (hashcode
, t
);
333 if (TYPE_SIZE (t
) == 0)
340 build_cplus_staticfn_type (basetype
, rettype
, argtypes
)
341 tree basetype
, rettype
, argtypes
;
346 /* Make a node of the sort we want. */
347 t
= make_node (FUNCTION_TYPE
);
349 TYPE_METHOD_BASETYPE (t
) = TYPE_MAIN_VARIANT (basetype
);
350 TREE_TYPE (t
) = rettype
;
352 TYPE_ARG_TYPES (t
) = argtypes
;
354 /* If we already have such a type, use the old one and free this one.
355 Note that it also frees up the above cons cell if found. */
356 hashcode
= TYPE_HASH (basetype
) + TYPE_HASH (rettype
) + type_hash_list (argtypes
);
357 t
= type_hash_canon (hashcode
, t
);
359 if (TYPE_SIZE (t
) == 0)
366 build_cplus_array_type (elt_type
, index_type
)
370 register struct obstack
*ambient_obstack
= current_obstack
;
371 register struct obstack
*ambient_saveable_obstack
= saveable_obstack
;
374 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
375 make this permanent too. */
376 if (TREE_PERMANENT (elt_type
)
377 && (index_type
== 0 || TREE_PERMANENT (index_type
)))
379 current_obstack
= &permanent_obstack
;
380 saveable_obstack
= &permanent_obstack
;
383 t
= build_array_type (elt_type
, index_type
);
385 /* Push these needs up so that initialization takes place
387 TYPE_NEEDS_CONSTRUCTING (t
) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type
));
388 TYPE_NEEDS_DESTRUCTOR (t
) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type
));
389 current_obstack
= ambient_obstack
;
390 saveable_obstack
= ambient_saveable_obstack
;
394 /* Make a variant type in the proper way for C/C++, propagating qualifiers
395 down to the element type of an array. */
398 cp_build_type_variant (type
, constp
, volatilep
)
400 int constp
, volatilep
;
402 if (TREE_CODE (type
) == ARRAY_TYPE
)
404 tree real_main_variant
= TYPE_MAIN_VARIANT (type
);
406 push_obstacks (TYPE_OBSTACK (real_main_variant
),
407 TYPE_OBSTACK (real_main_variant
));
408 type
= build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type
),
412 /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
413 make a copy. (TYPE might have come from the hash table and
414 REAL_MAIN_VARIANT might be in some function's obstack.) */
416 if (TYPE_OBSTACK (type
) != TYPE_OBSTACK (real_main_variant
))
418 type
= copy_node (type
);
419 TYPE_POINTER_TO (type
) = TYPE_REFERENCE_TO (type
) = 0;
422 TYPE_MAIN_VARIANT (type
) = real_main_variant
;
425 return build_type_variant (type
, constp
, volatilep
);
428 /* Add OFFSET to all base types of T.
430 OFFSET, which is a type offset, is number of bytes.
432 Note that we don't have to worry about having two paths to the
433 same base type, since this type owns its association list. */
435 propagate_binfo_offsets (binfo
, offset
)
439 tree binfos
= BINFO_BASETYPES (binfo
);
440 int i
, n_baselinks
= binfos
? TREE_VEC_LENGTH (binfos
) : 0;
442 for (i
= 0; i
< n_baselinks
; /* note increment is done in the loop. */)
444 tree base_binfo
= TREE_VEC_ELT (binfos
, i
);
446 if (TREE_VIA_VIRTUAL (base_binfo
))
451 tree base_binfos
= BINFO_BASETYPES (base_binfo
);
454 for (j
= i
+1; j
< n_baselinks
; j
++)
455 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos
, j
)))
457 /* The next basetype offset must take into account the space
458 between the classes, not just the size of each class. */
459 delta
= size_binop (MINUS_EXPR
,
460 BINFO_OFFSET (TREE_VEC_ELT (binfos
, j
)),
461 BINFO_OFFSET (base_binfo
));
466 if (BINFO_OFFSET_ZEROP (base_binfo
))
467 BINFO_OFFSET (base_binfo
) = offset
;
469 BINFO_OFFSET (base_binfo
)
470 = size_binop (PLUS_EXPR
, BINFO_OFFSET (base_binfo
), offset
);
472 BINFO_OFFSET (base_binfo
) = offset
;
477 tree chain
= NULL_TREE
;
479 /* Now unshare the structure beneath BASE_BINFO. */
480 for (k
= TREE_VEC_LENGTH (base_binfos
)-1;
483 tree base_base_binfo
= TREE_VEC_ELT (base_binfos
, k
);
484 if (! TREE_VIA_VIRTUAL (base_base_binfo
))
485 TREE_VEC_ELT (base_binfos
, k
)
486 = make_binfo (BINFO_OFFSET (base_base_binfo
),
488 BINFO_VTABLE (base_base_binfo
),
489 BINFO_VIRTUALS (base_base_binfo
),
491 chain
= TREE_VEC_ELT (base_binfos
, k
);
492 TREE_VIA_PUBLIC (chain
) = TREE_VIA_PUBLIC (base_base_binfo
);
493 TREE_VIA_PROTECTED (chain
) = TREE_VIA_PROTECTED (base_base_binfo
);
495 /* Now propagate the offset to the base types. */
496 propagate_binfo_offsets (base_binfo
, offset
);
499 /* Go to our next class that counts for offset propagation. */
502 offset
= size_binop (PLUS_EXPR
, offset
, delta
);
507 /* Compute the actual offsets that our virtual base classes
508 will have *for this type*. This must be performed after
509 the fields are laid out, since virtual baseclasses must
510 lay down at the end of the record.
512 Returns the maximum number of virtual functions any of the virtual
513 baseclasses provide. */
515 layout_vbasetypes (rec
, max
)
519 /* Get all the virtual base types that this type uses.
520 The TREE_VALUE slot holds the virtual baseclass type. */
521 tree vbase_types
= get_vbase_types (rec
);
523 #ifdef STRUCTURE_SIZE_BOUNDARY
524 unsigned record_align
= MAX (STRUCTURE_SIZE_BOUNDARY
, TYPE_ALIGN (rec
));
526 unsigned record_align
= MAX (BITS_PER_UNIT
, TYPE_ALIGN (rec
));
530 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
531 where CONST_SIZE is an integer
532 and VAR_SIZE is a tree expression.
533 If VAR_SIZE is null, the size is just CONST_SIZE.
534 Naturally we try to avoid using VAR_SIZE. */
535 register unsigned const_size
= 0;
536 register tree var_size
= 0;
537 int nonvirtual_const_size
;
538 tree nonvirtual_var_size
;
540 CLASSTYPE_VBASECLASSES (rec
) = vbase_types
;
542 if (TREE_CODE (TYPE_SIZE (rec
)) == INTEGER_CST
)
543 const_size
= TREE_INT_CST_LOW (TYPE_SIZE (rec
));
545 var_size
= TYPE_SIZE (rec
);
547 nonvirtual_const_size
= const_size
;
548 nonvirtual_var_size
= var_size
;
552 tree basetype
= BINFO_TYPE (vbase_types
);
555 desired_align
= TYPE_ALIGN (basetype
);
556 record_align
= MAX (record_align
, desired_align
);
559 offset
= integer_zero_node
;
562 /* Give each virtual base type the alignment it wants. */
563 const_size
= CEIL (const_size
, TYPE_ALIGN (basetype
))
564 * TYPE_ALIGN (basetype
);
565 offset
= size_int (CEIL (const_size
, BITS_PER_UNIT
));
568 if (CLASSTYPE_VSIZE (basetype
) > max
)
569 max
= CLASSTYPE_VSIZE (basetype
);
570 BINFO_OFFSET (vbase_types
) = offset
;
572 if (TREE_CODE (TYPE_SIZE (basetype
)) == INTEGER_CST
)
573 const_size
+= MAX (BITS_PER_UNIT
,
574 TREE_INT_CST_LOW (TYPE_SIZE (basetype
))
575 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype
)));
576 else if (var_size
== 0)
577 var_size
= TYPE_SIZE (basetype
);
579 var_size
= size_binop (PLUS_EXPR
, var_size
, TYPE_SIZE (basetype
));
581 vbase_types
= TREE_CHAIN (vbase_types
);
584 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
585 here, as that is for this class, without any virtual base classes. */
586 TYPE_ALIGN (rec
) = record_align
;
587 if (const_size
!= nonvirtual_const_size
)
589 CLASSTYPE_VBASE_SIZE (rec
)
590 = size_int (const_size
- nonvirtual_const_size
);
591 TYPE_SIZE (rec
) = size_int (const_size
);
594 /* Now propagate offset information throughout the lattice
595 under the vbase type. */
596 for (vbase_types
= CLASSTYPE_VBASECLASSES (rec
); vbase_types
;
597 vbase_types
= TREE_CHAIN (vbase_types
))
599 tree base_binfos
= BINFO_BASETYPES (vbase_types
);
603 tree chain
= NULL_TREE
;
605 /* Now unshare the structure beneath BASE_BINFO. */
607 for (j
= TREE_VEC_LENGTH (base_binfos
)-1;
610 tree base_base_binfo
= TREE_VEC_ELT (base_binfos
, j
);
611 if (! TREE_VIA_VIRTUAL (base_base_binfo
))
612 TREE_VEC_ELT (base_binfos
, j
)
613 = make_binfo (BINFO_OFFSET (base_base_binfo
),
615 BINFO_VTABLE (base_base_binfo
),
616 BINFO_VIRTUALS (base_base_binfo
),
618 chain
= TREE_VEC_ELT (base_binfos
, j
);
619 TREE_VIA_PUBLIC (chain
) = TREE_VIA_PUBLIC (base_base_binfo
);
620 TREE_VIA_PROTECTED (chain
) = TREE_VIA_PROTECTED (base_base_binfo
);
623 propagate_binfo_offsets (vbase_types
, BINFO_OFFSET (vbase_types
));
630 /* Lay out the base types of a record type, REC.
631 Tentatively set the size and alignment of REC
632 according to the base types alone.
634 Offsets for immediate nonvirtual baseclasses are also computed here.
636 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
637 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
639 Returns list of virtual base classes in a FIELD_DECL chain. */
641 layout_basetypes (rec
, binfos
)
644 /* Chain to hold all the new FIELD_DECLs which point at virtual
646 tree vbase_decls
= NULL_TREE
;
648 #ifdef STRUCTURE_SIZE_BOUNDARY
649 unsigned record_align
= MAX (STRUCTURE_SIZE_BOUNDARY
, TYPE_ALIGN (rec
));
651 unsigned record_align
= MAX (BITS_PER_UNIT
, TYPE_ALIGN (rec
));
654 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
655 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
656 the size is just CONST_SIZE. Naturally we try to avoid using
657 VAR_SIZE. And so far, we've been successful. */
659 register tree var_size
= 0;
662 register unsigned const_size
= 0;
663 int i
, n_baseclasses
= binfos
? TREE_VEC_LENGTH (binfos
) : 0;
665 /* Handle basetypes almost like fields, but record their
666 offsets differently. */
668 for (i
= 0; i
< n_baseclasses
; i
++)
670 int inc
, desired_align
, int_vbase_size
;
671 register tree base_binfo
= TREE_VEC_ELT (binfos
, i
);
672 register tree basetype
= BINFO_TYPE (base_binfo
);
675 if (TYPE_SIZE (basetype
) == 0)
678 /* This error is now reported in xref_tag, thus giving better
679 location information. */
680 error_with_aggr_type (base_binfo
,
681 "base class `%s' has incomplete type");
683 TREE_VIA_PUBLIC (base_binfo
) = 1;
684 TREE_VIA_PROTECTED (base_binfo
) = 0;
685 TREE_VIA_VIRTUAL (base_binfo
) = 0;
687 /* Should handle this better so that
690 class B: private A { virtual void F(); };
692 does not dump core when compiled. */
693 my_friendly_abort (121);
698 /* All basetypes are recorded in the association list of the
701 if (TREE_VIA_VIRTUAL (base_binfo
))
704 char *name
= (char *)alloca (TYPE_NAME_LENGTH (basetype
)
705 + sizeof (VBASE_NAME
) + 1);
707 /* The offset for a virtual base class is only used in computing
708 virtual function tables and for initializing virtual base
709 pointers. It is built once `get_vbase_types' is called. */
711 /* If this basetype can come from another vbase pointer
712 without an additional indirection, we will share
713 that pointer. If an indirection is involved, we
714 make our own pointer. */
715 for (j
= 0; j
< n_baseclasses
; j
++)
717 tree other_base_binfo
= TREE_VEC_ELT (binfos
, j
);
718 if (! TREE_VIA_VIRTUAL (other_base_binfo
)
719 && binfo_member (basetype
,
720 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo
))))
723 sprintf (name
, VBASE_NAME_FORMAT
, TYPE_NAME_STRING (basetype
));
724 decl
= build_lang_decl (FIELD_DECL
, get_identifier (name
),
725 build_pointer_type (basetype
));
726 /* If you change any of the below, take a look at all the
727 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
729 DECL_ASSEMBLER_NAME (decl
) = get_identifier (VTABLE_BASE
);
730 DECL_VIRTUAL_P (decl
) = 1;
731 DECL_FIELD_CONTEXT (decl
) = rec
;
732 DECL_CLASS_CONTEXT (decl
) = rec
;
733 DECL_FCONTEXT (decl
) = basetype
;
734 DECL_FIELD_SIZE (decl
) = 0;
735 DECL_ALIGN (decl
) = TYPE_ALIGN (ptr_type_node
);
736 TREE_CHAIN (decl
) = vbase_decls
;
737 BINFO_VPTR_FIELD (base_binfo
) = decl
;
740 if (warn_nonvdtor
&& TYPE_HAS_DESTRUCTOR (basetype
)
741 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype
), 0)) == NULL_TREE
)
743 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype
), 0),
744 "destructor `%s' non-virtual");
745 warning ("in inheritance relationship `%s: virtual %s'",
746 TYPE_NAME_STRING (rec
),
747 TYPE_NAME_STRING (basetype
));
750 /* The space this decl occupies has already been accounted for. */
755 offset
= integer_zero_node
;
758 /* Give each base type the alignment it wants. */
759 const_size
= CEIL (const_size
, TYPE_ALIGN (basetype
))
760 * TYPE_ALIGN (basetype
);
761 offset
= size_int ((const_size
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
764 /* bpk: Disabled this check until someone is willing to
765 claim it as theirs and explain exactly what circumstances
766 warrant the warning. */
767 if (warn_nonvdtor
&& TYPE_HAS_DESTRUCTOR (basetype
)
768 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype
), 0)) == NULL_TREE
)
770 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype
), 0),
771 "destructor `%s' non-virtual");
772 warning ("in inheritance relationship `%s:%s %s'",
773 TYPE_NAME_STRING (rec
),
774 TREE_VIA_VIRTUAL (base_binfo
) ? " virtual" : "",
775 TYPE_NAME_STRING (basetype
));
779 BINFO_OFFSET (base_binfo
) = offset
;
780 if (CLASSTYPE_VSIZE (basetype
))
782 BINFO_VTABLE (base_binfo
) = TYPE_BINFO_VTABLE (basetype
);
783 BINFO_VIRTUALS (base_binfo
) = TYPE_BINFO_VIRTUALS (basetype
);
785 TREE_CHAIN (base_binfo
) = TYPE_BINFO (rec
);
786 TYPE_BINFO (rec
) = base_binfo
;
788 /* Add only the amount of storage not present in
789 the virtual baseclasses. */
791 int_vbase_size
= TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype
));
792 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype
)) > int_vbase_size
)
794 inc
= MAX (record_align
,
795 (TREE_INT_CST_LOW (TYPE_SIZE (basetype
))
798 /* Record must have at least as much alignment as any field. */
799 desired_align
= TYPE_ALIGN (basetype
);
800 record_align
= MAX (record_align
, desired_align
);
807 CLASSTYPE_SIZE (rec
) = size_int (const_size
);
809 CLASSTYPE_SIZE (rec
) = integer_zero_node
;
810 CLASSTYPE_ALIGN (rec
) = record_align
;
815 /* Hashing of lists so that we don't make duplicates.
816 The entry point is `list_hash_canon'. */
818 /* Each hash table slot is a bucket containing a chain
819 of these structures. */
823 struct list_hash
*next
; /* Next structure in the bucket. */
824 int hashcode
; /* Hash code of this list. */
825 tree list
; /* The list recorded here. */
828 /* Now here is the hash table. When recording a list, it is added
829 to the slot whose index is the hash code mod the table size.
830 Note that the hash table is used for several kinds of lists.
831 While all these live in the same table, they are completely independent,
832 and the hash code is computed differently for each of these. */
834 #define TYPE_HASH_SIZE 59
835 struct list_hash
*list_hash_table
[TYPE_HASH_SIZE
];
837 /* Compute a hash code for a list (chain of TREE_LIST nodes
838 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
839 TREE_COMMON slots), by adding the hash codes of the individual entries. */
845 register int hashcode
= 0;
847 if (TREE_CHAIN (list
))
848 hashcode
+= TYPE_HASH (TREE_CHAIN (list
));
850 if (TREE_VALUE (list
))
851 hashcode
+= TYPE_HASH (TREE_VALUE (list
));
854 if (TREE_PURPOSE (list
))
855 hashcode
+= TYPE_HASH (TREE_PURPOSE (list
));
861 /* Look in the type hash table for a type isomorphic to TYPE.
862 If one is found, return it. Otherwise return 0. */
865 list_hash_lookup (hashcode
, list
)
869 register struct list_hash
*h
;
870 for (h
= list_hash_table
[hashcode
% TYPE_HASH_SIZE
]; h
; h
= h
->next
)
871 if (h
->hashcode
== hashcode
872 && TREE_VIA_VIRTUAL (h
->list
) == TREE_VIA_VIRTUAL (list
)
873 && TREE_VIA_PUBLIC (h
->list
) == TREE_VIA_PUBLIC (list
)
874 && TREE_VIA_PROTECTED (h
->list
) == TREE_VIA_PROTECTED (list
)
875 && TREE_PURPOSE (h
->list
) == TREE_PURPOSE (list
)
876 && TREE_VALUE (h
->list
) == TREE_VALUE (list
)
877 && TREE_CHAIN (h
->list
) == TREE_CHAIN (list
))
879 my_friendly_assert (TREE_TYPE (h
->list
) == TREE_TYPE (list
), 299);
885 /* Add an entry to the list-hash-table
886 for a list TYPE whose hash code is HASHCODE. */
889 list_hash_add (hashcode
, list
)
893 register struct list_hash
*h
;
895 h
= (struct list_hash
*) obstack_alloc (&class_obstack
, sizeof (struct list_hash
));
896 h
->hashcode
= hashcode
;
898 h
->next
= list_hash_table
[hashcode
% TYPE_HASH_SIZE
];
899 list_hash_table
[hashcode
% TYPE_HASH_SIZE
] = h
;
902 /* Given TYPE, and HASHCODE its hash code, return the canonical
903 object for an identical list if one already exists.
904 Otherwise, return TYPE, and record it as the canonical object
905 if it is a permanent object.
907 To use this function, first create a list of the sort you want.
908 Then compute its hash code from the fields of the list that
909 make it different from other similar lists.
910 Then call this function and use the value.
911 This function frees the list you pass in if it is a duplicate. */
913 /* Set to 1 to debug without canonicalization. Never set by program. */
914 static int debug_no_list_hash
= 0;
917 list_hash_canon (hashcode
, list
)
923 if (debug_no_list_hash
)
926 t1
= list_hash_lookup (hashcode
, list
);
929 obstack_free (&class_obstack
, list
);
933 /* If this is a new list, record it for later reuse. */
934 list_hash_add (hashcode
, list
);
940 hash_tree_cons (via_public
, via_virtual
, via_protected
, purpose
, value
, chain
)
941 int via_public
, via_virtual
, via_protected
;
942 tree purpose
, value
, chain
;
944 struct obstack
*ambient_obstack
= current_obstack
;
948 current_obstack
= &class_obstack
;
949 t
= tree_cons (purpose
, value
, chain
);
950 TREE_VIA_PUBLIC (t
) = via_public
;
951 TREE_VIA_PROTECTED (t
) = via_protected
;
952 TREE_VIA_VIRTUAL (t
) = via_virtual
;
953 hashcode
= list_hash (t
);
954 t
= list_hash_canon (hashcode
, t
);
955 current_obstack
= ambient_obstack
;
959 /* Constructor for hashed lists. */
961 hash_tree_chain (value
, chain
)
964 struct obstack
*ambient_obstack
= current_obstack
;
968 current_obstack
= &class_obstack
;
969 t
= tree_cons (NULL_TREE
, value
, chain
);
970 hashcode
= list_hash (t
);
971 t
= list_hash_canon (hashcode
, t
);
972 current_obstack
= ambient_obstack
;
976 /* Similar, but used for concatenating two lists. */
978 hash_chainon (list1
, list2
)
985 if (TREE_CHAIN (list1
) == NULL_TREE
)
986 return hash_tree_chain (TREE_VALUE (list1
), list2
);
987 return hash_tree_chain (TREE_VALUE (list1
),
988 hash_chainon (TREE_CHAIN (list1
), list2
));
992 get_identifier_list (value
)
995 tree list
= IDENTIFIER_AS_LIST (value
);
996 if (list
!= NULL_TREE
997 && (TREE_CODE (list
) != TREE_LIST
998 || TREE_VALUE (list
) != value
))
1000 else if (IDENTIFIER_HAS_TYPE_VALUE (value
)
1001 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value
)) == RECORD_TYPE
1002 && IDENTIFIER_TYPE_VALUE (value
)
1003 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value
)))
1005 tree type
= IDENTIFIER_TYPE_VALUE (value
);
1007 if (TYPE_PTRMEMFUNC_P (type
))
1009 else if (type
== current_class_type
)
1010 /* Don't mess up the constructor name. */
1011 list
= tree_cons (NULL_TREE
, value
, NULL_TREE
);
1015 /* This will return the correct thing for regular types,
1016 nested types, and templates. Yay! */
1017 if (TYPE_NESTED_NAME (type
))
1018 id
= TYPE_NESTED_NAME (type
);
1020 id
= TYPE_IDENTIFIER (type
);
1022 if (CLASSTYPE_ID_AS_LIST (type
) == NULL_TREE
)
1023 CLASSTYPE_ID_AS_LIST (type
)
1024 = perm_tree_cons (NULL_TREE
, id
, NULL_TREE
);
1025 list
= CLASSTYPE_ID_AS_LIST (type
);
1032 get_decl_list (value
)
1035 tree list
= NULL_TREE
;
1037 if (TREE_CODE (value
) == IDENTIFIER_NODE
)
1038 list
= get_identifier_list (value
);
1039 else if (TREE_CODE (value
) == RECORD_TYPE
1040 && TYPE_LANG_SPECIFIC (value
))
1041 list
= CLASSTYPE_AS_LIST (value
);
1043 if (list
!= NULL_TREE
)
1045 my_friendly_assert (TREE_CHAIN (list
) == NULL_TREE
, 301);
1049 return build_decl_list (NULL_TREE
, value
);
1052 /* Look in the type hash table for a type isomorphic to
1053 `build_tree_list (NULL_TREE, VALUE)'.
1054 If one is found, return it. Otherwise return 0. */
1057 list_hash_lookup_or_cons (value
)
1060 register int hashcode
= TYPE_HASH (value
);
1061 register struct list_hash
*h
;
1062 struct obstack
*ambient_obstack
;
1063 tree list
= NULL_TREE
;
1065 if (TREE_CODE (value
) == IDENTIFIER_NODE
)
1066 list
= get_identifier_list (value
);
1067 else if (TREE_CODE (value
) == TYPE_DECL
1068 && TREE_CODE (TREE_TYPE (value
)) == RECORD_TYPE
1069 && TYPE_LANG_SPECIFIC (TREE_TYPE (value
)))
1070 list
= CLASSTYPE_ID_AS_LIST (TREE_TYPE (value
));
1071 else if (TREE_CODE (value
) == RECORD_TYPE
1072 && TYPE_LANG_SPECIFIC (value
))
1073 list
= CLASSTYPE_AS_LIST (value
);
1075 if (list
!= NULL_TREE
)
1077 my_friendly_assert (TREE_CHAIN (list
) == NULL_TREE
, 302);
1081 if (debug_no_list_hash
)
1082 return hash_tree_chain (value
, NULL_TREE
);
1084 for (h
= list_hash_table
[hashcode
% TYPE_HASH_SIZE
]; h
; h
= h
->next
)
1085 if (h
->hashcode
== hashcode
1086 && TREE_VIA_VIRTUAL (h
->list
) == 0
1087 && TREE_VIA_PUBLIC (h
->list
) == 0
1088 && TREE_VIA_PROTECTED (h
->list
) == 0
1089 && TREE_PURPOSE (h
->list
) == 0
1090 && TREE_VALUE (h
->list
) == value
)
1092 my_friendly_assert (TREE_TYPE (h
->list
) == 0, 303);
1093 my_friendly_assert (TREE_CHAIN (h
->list
) == 0, 304);
1097 ambient_obstack
= current_obstack
;
1098 current_obstack
= &class_obstack
;
1099 list
= build_tree_list (NULL_TREE
, value
);
1100 list_hash_add (hashcode
, list
);
1101 current_obstack
= ambient_obstack
;
1105 /* Build an association between TYPE and some parameters:
1107 OFFSET is the offset added to `this' to convert it to a pointer
1110 BINFO is the base binfo to use, if we are deriving from one. This
1111 is necessary, as we want specialized parent binfos from base
1112 classes, so that the VTABLE_NAMEs of bases are for the most derived
1113 type, instead of of the simple type.
1115 VTABLE is the virtual function table with which to initialize
1116 sub-objects of type TYPE.
1118 VIRTUALS are the virtual functions sitting in VTABLE.
1120 CHAIN are more associations we must retain. */
1123 make_binfo (offset
, binfo
, vtable
, virtuals
, chain
)
1125 tree vtable
, virtuals
;
1128 tree new_binfo
= make_tree_vec (6);
1131 if (TREE_CODE (binfo
) == TREE_VEC
)
1132 type
= BINFO_TYPE (binfo
);
1136 binfo
= TYPE_BINFO (binfo
);
1139 TREE_CHAIN (new_binfo
) = chain
;
1141 TREE_USED (new_binfo
) = TREE_USED (chain
);
1143 TREE_TYPE (new_binfo
) = TYPE_MAIN_VARIANT (type
);
1144 BINFO_OFFSET (new_binfo
) = offset
;
1145 BINFO_VTABLE (new_binfo
) = vtable
;
1146 BINFO_VIRTUALS (new_binfo
) = virtuals
;
1147 BINFO_VPTR_FIELD (new_binfo
) = NULL_TREE
;
1149 if (binfo
&& BINFO_BASETYPES (binfo
) != NULL_TREE
)
1150 BINFO_BASETYPES (new_binfo
) = copy_node (BINFO_BASETYPES (binfo
));
1158 tree binfo
= copy_list (list
);
1162 TREE_USED (binfo
) = 0;
1163 if (BINFO_BASETYPES (binfo
))
1164 BINFO_BASETYPES (binfo
) = copy_node (BINFO_BASETYPES (binfo
));
1165 binfo
= TREE_CHAIN (binfo
);
1170 /* Return the binfo value for ELEM in TYPE. */
1173 binfo_value (elem
, type
)
1177 if (get_base_distance (elem
, type
, 0, (tree
*)0) == -2)
1178 compiler_error ("base class `%s' ambiguous in binfo_value",
1179 TYPE_NAME_STRING (elem
));
1181 return TYPE_BINFO (type
);
1182 if (TREE_CODE (elem
) == RECORD_TYPE
&& TYPE_BINFO (elem
) == type
)
1184 return get_binfo (elem
, type
, 0);
1191 register tree prev
= 0, tmp
, next
;
1192 for (tmp
= path
; tmp
; tmp
= next
)
1194 next
= BINFO_INHERITANCE_CHAIN (tmp
);
1195 BINFO_INHERITANCE_CHAIN (tmp
) = prev
;
1202 virtual_member (elem
, list
)
1209 for (t
= list
; t
; t
= TREE_CHAIN (t
))
1210 if (elem
== BINFO_TYPE (t
))
1213 for (t
= list
; t
; t
= TREE_CHAIN (t
))
1215 tree binfos
= BINFO_BASETYPES (t
);
1218 if (binfos
!= NULL_TREE
)
1219 for (i
= TREE_VEC_LENGTH (binfos
)-1; i
>= 0; i
--)
1221 nval
= binfo_value (elem
, BINFO_TYPE (TREE_VEC_ELT (binfos
, i
)));
1224 if (rval
&& BINFO_OFFSET (nval
) != BINFO_OFFSET (rval
))
1225 my_friendly_abort (104);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines.  Code kept byte-identical; comments only.  */
/* Fragment: a debugging dump for a binfo ELEM -- prints its type name and
   offset, dumps the vtable type, the vtable decl name (if any), then walks
   BINFO_VIRTUALS printing each virtual function's assembler name and
   vtable index.  Function header and loop headers were dropped.  */
1240 fprintf (stderr
, "type \"%s\"; offset = %d\n",
1241 TYPE_NAME_STRING (BINFO_TYPE (elem
)),
1242 TREE_INT_CST_LOW (BINFO_OFFSET (elem
)));
1243 fprintf (stderr
, "vtable type:\n");
1244 debug_tree (BINFO_TYPE (elem
));
1245 if (BINFO_VTABLE (elem
))
1246 fprintf (stderr
, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem
))));
/* The `else' for the branch above was dropped by the extraction.  */
1248 fprintf (stderr
, "no vtable decl yet\n");
1249 fprintf (stderr
, "virtuals:\n");
1250 virtuals
= BINFO_VIRTUALS (elem
);
1253 /* skip the rtti type descriptor entry */
1254 virtuals
= TREE_CHAIN (virtuals
);
/* Presumably inside a while-loop over the virtuals chain (header lost).  */
1259 tree fndecl
= TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals
)), 0);
/* Print assembler name plus loop index vs. the decl's recorded vindex.  */
1260 fprintf (stderr
, "%s [%d =? %d]\n",
1261 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl
)),
1262 i
, TREE_INT_CST_LOW (DECL_VINDEX (fndecl
)));
1263 virtuals
= TREE_CHAIN (virtuals
);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (loop body incrementing `len' and the
   return were dropped).  Code kept byte-identical; comments only.  */
1268 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1269 We expect a null pointer to mark the end of the chain.
1270 This is the Lisp primitive `length'. */
1273 decl_list_length (t
)
1277 register int len
= 0;
/* Only FUNCTION_DECL / TEMPLATE_DECL chains are legal here (assert 300).  */
1279 my_friendly_assert (TREE_CODE (t
) == FUNCTION_DECL
1280 || TREE_CODE (t
) == TEMPLATE_DECL
, 300);
/* Walk the DECL_CHAIN links; body (presumably len++) was dropped.  */
1281 for (tail
= t
; tail
; tail
= DECL_CHAIN (tail
))
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines.  Code kept byte-identical; comments only.  */
/* Fragment: dispatch on the kind of T -- a lone FUNCTION_DECL (its return,
   presumably 1, was dropped), a TREE_LIST of decls (counted via
   decl_list_length), anything else aborts (code 359).  The enclosing
   function header was dropped by the extraction.  */
1291 if (TREE_CODE (t
) == FUNCTION_DECL
)
1293 else if (TREE_CODE (t
) == TREE_LIST
)
1294 return decl_list_length (TREE_VALUE (t
));
/* Unreachable for valid input: unknown node kind.  */
1296 my_friendly_abort (359);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (loop header, comparison, and returns were
   dropped).  Code kept byte-identical; comments only.  */
1300 /* Like value_member, but for DECL_CHAINs. */
1302 decl_value_member (elem
, list
)
/* Presumably the advance step of a search loop over the chain.  */
1309 list
= DECL_CHAIN (list
);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (the return statements were dropped).
   Code kept byte-identical; comments only.  */
/* Predicate: is X a (possibly overloaded) function -- either a single
   FUNCTION_DECL, or a TREE_LIST whose value is a FUNCTION_DECL or
   TEMPLATE_DECL.  */
1315 is_overloaded_fn (x
)
1318 if (TREE_CODE (x
) == FUNCTION_DECL
)
1321 if (TREE_CODE (x
) == TREE_LIST
1322 && (TREE_CODE (TREE_VALUE (x
)) == FUNCTION_DECL
1323 || TREE_CODE (TREE_VALUE (x
)) == TEMPLATE_DECL
))
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (the returns were dropped).  Code kept
   byte-identical; comments only.  */
/* Predicate: like is_overloaded_fn above, but presumably requires an
   actual overload set (a TREE_LIST), not a lone FUNCTION_DECL --
   TODO confirm against the original source.  */
1330 really_overloaded_fn (x
)
1333 if (TREE_CODE (x
) == TREE_LIST
1334 && (TREE_CODE (TREE_VALUE (x
)) == FUNCTION_DECL
1335 || TREE_CODE (TREE_VALUE (x
)) == TEMPLATE_DECL
))
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (function header and the FUNCTION_DECL
   branch's return were dropped).  Code kept byte-identical.  */
/* Fragment: extract the first function from FROM -- a lone FUNCTION_DECL
   is presumably returned as-is; otherwise FROM must be a TREE_LIST
   (assert 9) and its TREE_VALUE is returned.  */
1345 if (TREE_CODE (from
) == FUNCTION_DECL
)
1348 my_friendly_assert (TREE_CODE (from
) == TREE_LIST
, 9);
1350 return TREE_VALUE (from
);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (the assignment initializing `func' and the
   non-thunk return inside the thunk branch were dropped).  Code kept
   byte-identical; comments only.  */
/* Extract the function address from a vtable ENTRY.  With -fvtable-thunks
   the entry may be (the address of) a THUNK_DECL, in which case the real
   function is in DECL_INITIAL; otherwise the entry is a CONSTRUCTOR whose
   third element holds the address.  */
1354 fnaddr_from_vtable_entry (entry
)
1357 if (flag_vtable_thunks
)
/* Strip an ADDR_EXPR wrapper to reach the decl.  */
1360 if (TREE_CODE (func
) == ADDR_EXPR
)
1361 func
= TREE_OPERAND (func
, 0);
/* A thunk records the target function in its DECL_INITIAL.  */
1362 if (TREE_CODE (func
) == THUNK_DECL
)
1363 return DECL_INITIAL (func
);
/* Non-thunk layout: third element of the CONSTRUCTOR's element list.  */
1368 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry
))));
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers; the thunk-branch body was dropped.  Code kept
   byte-identical; comments only.  */
/* Setter counterpart of fnaddr_from_vtable_entry: store VALUE as the
   function address in vtable ENTRY (third CONSTRUCTOR element in the
   non-thunk layout).  */
1372 set_fnaddr_from_vtable_entry (entry
, value
)
/* Thunk-case handling was dropped by the extraction.  */
1375 if (flag_vtable_thunks
)
1378 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry
)))) = value
;
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers.  Code kept byte-identical; comments only.  */
/* Return the argument-type list of T's function type, minus its first
   entry (TREE_CHAIN skips it -- presumably the `this' argument).  */
1382 function_arg_chain (t
)
1385 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t
)));
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (the body of the CODE-match branch, which
   presumably strips one level via TREE_TYPE, was dropped).  Code kept
   byte-identical; comments only.  */
/* Nonzero if T, after unwrapping a node of kind CODE (e.g. a pointer or
   reference), is an aggregate type.  */
1389 promotes_to_aggr_type (t
, code
)
1391 enum tree_code code
;
1393 if (TREE_CODE (t
) == code
)
1395 return IS_AGGR_TYPE (t
);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers; the early-return for mismatched codes was dropped.
   Code kept byte-identical; comments only.  */
/* Nonzero iff T1 and T2 have the same tree code and both are aggregate
   types.  */
1399 is_aggr_type_2 (t1
, t2
)
1402 if (TREE_CODE (t1
) != TREE_CODE (t2
))
1404 return IS_AGGR_TYPE (t1
) && IS_AGGR_TYPE (t2
);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (parameter declarations were dropped).
   Code kept byte-identical; comments only.  */
1407 /* Give message using types TYPE1 and TYPE2 as arguments.
1408 PFN is the function which will print the message;
1409 S is the format string for PFN to use. */
1411 message_2_types (pfn
, s
, type1
, type2
)
1416 tree name1
= TYPE_NAME (type1
);
1417 tree name2
= TYPE_NAME (type2
);
/* TYPE_NAME may be a TYPE_DECL; reduce each to its IDENTIFIER_NODE.  */
1418 if (TREE_CODE (name1
) == TYPE_DECL
)
1419 name1
= DECL_NAME (name1
);
1420 if (TREE_CODE (name2
) == TYPE_DECL
)
1421 name2
= DECL_NAME (name2
);
/* Invoke the caller-supplied printer with the two type names.  */
1422 (*pfn
) (s
, IDENTIFIER_POINTER (name1
), IDENTIFIER_POINTER (name2
));
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (declarations, braces, the ring-counter
   wrap/advance logic between the checks below).  Code kept byte-identical;
   comments only.  */
1425 #define PRINT_RING_SIZE 4
/* Return a printable name for DECL, caching the formatted strings of the
   last PRINT_RING_SIZE function decls in a small ring (decl_ring /
   print_ring) to avoid reformatting.  */
1428 lang_printable_name (decl
)
1431 static tree decl_ring
[PRINT_RING_SIZE
];
1432 static char *print_ring
[PRINT_RING_SIZE
];
1433 static int ring_counter
;
1436 /* Only cache functions. */
1437 if (TREE_CODE (decl
) != FUNCTION_DECL
1438 || DECL_LANG_SPECIFIC (decl
) == 0)
1439 return decl_as_string (decl
, 1);
1441 /* See if this print name is lying around. */
1442 for (i
= 0; i
< PRINT_RING_SIZE
; i
++)
1443 if (decl_ring
[i
] == decl
)
1444 /* yes, so return it. */
1445 return print_ring
[i
];
/* Advance the ring cursor, wrapping at PRINT_RING_SIZE (wrap-to-zero
   lines were dropped by the extraction).  */
1447 if (++ring_counter
== PRINT_RING_SIZE
)
/* Avoid evicting the cached entry for the current function; if it is
   still hit after a second advance+wrap, something is wrong (abort 106).
   The advance/wrap statements between these checks were dropped.  */
1450 if (current_function_decl
!= NULL_TREE
)
1452 if (decl_ring
[ring_counter
] == current_function_decl
)
1454 if (ring_counter
== PRINT_RING_SIZE
)
1456 if (decl_ring
[ring_counter
] == current_function_decl
)
1457 my_friendly_abort (106);
/* Free the evicted slot's old string before reuse.  */
1460 if (print_ring
[ring_counter
])
1461 free (print_ring
[ring_counter
]);
/* Constructors and destructors are printed without a return type.  */
1464 int print_ret_type_p
1465 = (!DECL_CONSTRUCTOR_P (decl
)
1466 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl
)));
1468 char *name
= (char *)decl_as_string (decl
, print_ret_type_p
);
/* Cache a private copy of the formatted name in the ring.
   NOTE(review): malloc result is used unchecked -- matches the era's
   style, but worth noting.  */
1469 print_ring
[ring_counter
] = (char *)malloc (strlen (name
) + 1);
1470 strcpy (print_ring
[ring_counter
], name
);
1471 decl_ring
[ring_counter
] = decl
;
1473 return print_ring
[ring_counter
];
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers; the comparator's signature and parameter declarations
   were dropped.  Code kept byte-identical; comments only.  */
1476 /* Comparison function for sorting identifiers in RAISES lists.
1477 Note that because IDENTIFIER_NODEs are unique, we can sort
1478 them by address, saving an indirection. */
/* qsort-style comparator body: orders two list nodes by the address of
   their TREE_VALUE, cast to HOST_WIDE_INT.  */
1483 return (HOST_WIDE_INT
)TREE_VALUE (*p1
) - (HOST_WIDE_INT
)TREE_VALUE (*p2
);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (returns, braces, the code between finding
   a matching variant and building a new one).  Code kept byte-identical;
   comments only.  */
1486 /* Build the FUNCTION_TYPE or METHOD_TYPE which may raise exceptions
1487 listed in RAISES. */
1489 build_exception_variant (ctype
, type
, raises
)
/* Start the variant search from the main variant.  */
1494 tree v
= TYPE_MAIN_VARIANT (type
);
/* Scratch array sized to the RAISES list -- its use was dropped by the
   extraction.  */
1496 tree
*a
= (tree
*)alloca ((list_length (raises
)+1) * sizeof (tree
));
1497 int constp
= TYPE_READONLY (type
);
1498 int volatilep
= TYPE_VOLATILE (type
);
/* Scan existing variants for one with the same qualifiers and the same
   exception list.  */
1500 for (v
= TYPE_NEXT_VARIANT (v
); v
; v
= TYPE_NEXT_VARIANT (v
))
/* Skip variants whose cv-qualifiers differ (the skip itself was
   dropped by the extraction).  */
1502 if (TYPE_READONLY (v
) != constp
1503 || TYPE_VOLATILE (v
) != volatilep
)
1506 /* @@ This should do set equality, not exact match. */
1507 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v
), raises
))
1508 /* List of exceptions raised matches previously found list.
1510 @@ Nice to free up storage used in consing up the
1511 @@ list of exceptions raised. */
1515 /* Need to build a new variant. */
1516 v
= copy_node (type
);
/* Splice the new variant into TYPE's variant chain.  */
1517 TYPE_NEXT_VARIANT (v
) = TYPE_NEXT_VARIANT (type
);
1518 TYPE_NEXT_VARIANT (type
) = v
;
/* Make the raises list permanent if it is not already -- it outlives
   the temporary obstack.  (The matching pop was dropped.)  */
1519 if (raises
&& ! TREE_PERMANENT (raises
))
1521 push_obstacks_nochange ();
1522 end_temporary_allocation ();
1523 raises
= copy_list (raises
);
1526 TYPE_RAISES_EXCEPTIONS (v
) = raises
;
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, and heavy line loss: the function header, most `case'
   labels (only a subset survive), braces, loop headers, and several
   returns were dropped.  Code kept byte-identical; comments only.  */
1530 /* Subroutine of copy_to_permanent
1532 Assuming T is a node build bottom-up, make it all exist on
1533 permanent obstack, if it is not permanent already. */
1538 enum tree_code code
;
/* Already-permanent (or null) nodes are returned unchanged -- the return
   itself was dropped by the extraction.  */
1540 if (t
== NULL_TREE
|| TREE_PERMANENT (t
))
/* Recursive structural copy, dispatched on tree code.  */
1543 switch (code
= TREE_CODE (t
))
/* Error nodes propagate.  */
1546 return error_mark_node
;
/* Decl-like nodes: copy chain, type, initial value, and size.
   (The copy_node call for T itself was dropped.)  */
1555 tree chain
= TREE_CHAIN (t
);
1557 TREE_CHAIN (t
) = make_deep_copy (chain
);
1558 TREE_TYPE (t
) = make_deep_copy (TREE_TYPE (t
));
1559 DECL_INITIAL (t
) = make_deep_copy (DECL_INITIAL (t
));
1560 DECL_SIZE (t
) = make_deep_copy (DECL_SIZE (t
));
/* TREE_LIST nodes: copy purpose, value, and chain.  */
1566 tree chain
= TREE_CHAIN (t
);
1568 TREE_PURPOSE (t
) = make_deep_copy (TREE_PURPOSE (t
));
1569 TREE_VALUE (t
) = make_deep_copy (TREE_VALUE (t
));
1570 TREE_CHAIN (t
) = make_deep_copy (chain
);
/* TREE_VEC nodes: copy each element (the loop header decrementing `len'
   was dropped).  */
1576 int len
= TREE_VEC_LENGTH (t
);
1580 TREE_VEC_ELT (t
, len
) = make_deep_copy (TREE_VEC_ELT (t
, len
));
/* Leaf constants and similar: a shallow copy suffices.  */
1587 return copy_node (t
);
/* Ternary expressions: copy all three operands.  */
1593 TREE_OPERAND (t
, 0) = make_deep_copy (TREE_OPERAND (t
, 0));
1594 TREE_OPERAND (t
, 1) = make_deep_copy (TREE_OPERAND (t
, 1));
1595 TREE_OPERAND (t
, 2) = make_deep_copy (TREE_OPERAND (t
, 2));
/* Unary group (labels dropped): copy the single operand.  */
1600 TREE_OPERAND (t
, 0) = make_deep_copy (TREE_OPERAND (t
, 0));
/* Binary-expression case labels (several siblings in this run were
   dropped by the extraction).  */
1607 case TRUNC_DIV_EXPR
:
1608 case TRUNC_MOD_EXPR
:
1616 case BIT_ANDTC_EXPR
:
1617 case TRUTH_ANDIF_EXPR
:
1618 case TRUTH_ORIF_EXPR
:
1626 case FLOOR_DIV_EXPR
:
1627 case ROUND_DIV_EXPR
:
1629 case FLOOR_MOD_EXPR
:
1630 case ROUND_MOD_EXPR
:
1632 case PREDECREMENT_EXPR
:
1633 case PREINCREMENT_EXPR
:
1634 case POSTDECREMENT_EXPR
:
1635 case POSTINCREMENT_EXPR
:
/* Binary expressions: copy both operands.  */
1638 TREE_OPERAND (t
, 0) = make_deep_copy (TREE_OPERAND (t
, 0));
1639 TREE_OPERAND (t
, 1) = make_deep_copy (TREE_OPERAND (t
, 1));
/* Unary expressions: copy the operand.  */
1647 case TRUTH_NOT_EXPR
:
1651 TREE_OPERAND (t
, 0) = make_deep_copy (TREE_OPERAND (t
, 0));
/* Type nodes are rebuilt from deep-copied components rather than
   node-copied, so type canonicalization still applies.  */
1655 return build_pointer_type (make_deep_copy (TREE_TYPE (t
)));
1656 case REFERENCE_TYPE
:
1657 return build_reference_type (make_deep_copy (TREE_TYPE (t
)));
1659 return build_function_type (make_deep_copy (TREE_TYPE (t
)),
1660 make_deep_copy (TYPE_ARG_TYPES (t
)));
1662 return build_array_type (make_deep_copy (TREE_TYPE (t
)),
1663 make_deep_copy (TYPE_DOMAIN (t
)));
1665 return build_offset_type (make_deep_copy (TYPE_OFFSET_BASETYPE (t
)),
1666 make_deep_copy (TREE_TYPE (t
)));
1668 return build_method_type
1669 (make_deep_copy (TYPE_METHOD_BASETYPE (t
)),
1671 (make_deep_copy (TREE_TYPE (t
)),
1672 make_deep_copy (TREE_CHAIN (TYPE_ARG_TYPES (t
)))));
/* RECORD_TYPE case: pointer-to-member-function records get rebuilt.  */
1674 if (TYPE_PTRMEMFUNC_P (t
))
1675 return build_ptrmemfunc_type
1676 (make_deep_copy (TYPE_PTRMEMFUNC_FN_TYPE (t
)));
1677 /* else fall through */
1679 /* This list is incomplete, but should suffice for now.
1680 It is very important that `sorry' does not call
1681 `report_error_function'. That could cause an infinite loop. */
1683 sorry ("initializer contains unrecognized tree code");
1684 return error_mark_node
;
/* Should be unreachable: every case above returns.  */
1687 my_friendly_abort (107);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers; the early return and the final return of `t' were
   dropped.  Code kept byte-identical; comments only.  */
1692 /* Assuming T is a node built bottom-up, make it all exist on
1693 permanent obstack, if it is not permanent already. */
1695 copy_to_permanent (t
)
/* Save the ambient obstacks so they can be restored afterwards.  */
1698 register struct obstack
*ambient_obstack
= current_obstack
;
1699 register struct obstack
*ambient_saveable_obstack
= saveable_obstack
;
/* Null or already-permanent nodes need no work (return dropped).  */
1701 if (t
== NULL_TREE
|| TREE_PERMANENT (t
))
/* Redirect all allocation to the permanent obstack for the copy.  */
1704 saveable_obstack
= &permanent_obstack
;
1705 current_obstack
= saveable_obstack
;
1707 t
= make_deep_copy (t
);
/* Restore the ambient obstacks.  */
1709 current_obstack
= ambient_obstack
;
1710 saveable_obstack
= ambient_saveable_obstack
;
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers.  Code kept byte-identical; comments only.  */
/* Dump front-end memory/search/class statistics: sizes of the major
   obstacks plus the search and class counters.  */
1716 print_lang_statistics ()
1718 extern struct obstack maybepermanent_obstack
;
1719 print_obstack_statistics ("class_obstack", &class_obstack
);
1720 print_obstack_statistics ("permanent_obstack", &permanent_obstack
);
1721 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack
);
1722 print_search_statistics ();
1723 print_class_statistics ();
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers, missing lines (the `string'/`line' parameter declarations
   and any abort/exit after the fprintf were dropped).  Code kept
   byte-identical; comments only.  */
1726 /* This is used by the `assert' macro. It is provided in libgcc.a,
1727 which `cc' doesn't know how to link. Note that the C++ front-end
1728 no longer actually uses the `assert' macro (instead, it calls
1729 my_friendly_assert). But all of the back-end files still need this. */
1731 __eprintf (string
, expression
, line
, filename
)
1734 const char *expression
;
1736 const char *filename
;
/* Report the failed assertion via the caller-supplied format STRING.  */
1744 fprintf (stderr
, string
, expression
, line
, filename
);
/* NOTE(review): corrupted extraction -- split statements, fused original
   line numbers; the closing argument of the PLUS_EXPR (presumably the
   constant one, since nelts is max-index) was dropped.  Code kept
   byte-identical; comments only.  */
1749 /* Return, as an INTEGER_CST node, the number of elements for
1750 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1753 array_type_nelts_top (type
)
1756 return fold (build (PLUS_EXPR
, sizetype
,
1757 array_type_nelts (type
),
1761 /* Return, as an INTEGER_CST node, the number of elements for
1762 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1763 ARRAY_TYPEs that are clumped together. */
1766 array_type_nelts_total (type
)
1769 tree sz
= array_type_nelts_top (type
);
1770 type
= TREE_TYPE (type
);
1771 while (TREE_CODE (type
) == ARRAY_TYPE
)
1773 tree n
= array_type_nelts_top (type
);
1774 sz
= fold (build (MULT_EXPR
, sizetype
, sz
, n
));
1775 type
= TREE_TYPE (type
);