expr.c (expand_expr, [...]): Move from the C++ frontend to the backend where it belongs.
[gcc.git] / gcc / cp / tree.c
1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include <stdio.h>
24 #include "obstack.h"
25 #include "tree.h"
26 #include "cp-tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29 #ifdef __STDC__
30 #include <stdarg.h>
31 #else
32 #include <varargs.h>
33 #endif
34
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
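/* Added illustration, not part of the original tree.c: CEIL expresses
   "how many Y-sized units are needed to hold X", i.e. division rounded
   up.  With BITS_PER_UNIT == 8:

       CEIL (13, 8)  ==  (13 + 8 - 1) / 8  ==  20 / 8  ==  2

   so a 13-bit object needs 2 bytes.  The layout code below also uses
   `CEIL (size, align) * align' to round a bit count up to the next
   multiple of ALIGN.  */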
36
37 /* Return nonzero if REF is an lvalue valid for this language.
38 Lvalues can be assigned, unless they have TREE_READONLY.
39 Lvalues can have their address taken, unless they have DECL_REGISTER. */
40
41 int
42 real_lvalue_p (ref)
43 tree ref;
44 {
45 if (! language_lvalue_valid (ref))
46 return 0;
47
48 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
49 return 1;
50
51 if (ref == current_class_ptr && flag_this_is_variable <= 0)
52 return 0;
53
54 switch (TREE_CODE (ref))
55 {
56 /* preincrements and predecrements are valid lvals, provided
57 what they refer to are valid lvals. */
58 case PREINCREMENT_EXPR:
59 case PREDECREMENT_EXPR:
60 case COMPONENT_REF:
61 case SAVE_EXPR:
62 return real_lvalue_p (TREE_OPERAND (ref, 0));
63
64 case STRING_CST:
65 return 1;
66
67 case VAR_DECL:
68 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
69 && DECL_LANG_SPECIFIC (ref)
70 && DECL_IN_AGGR_P (ref))
71 return 0;
72 case INDIRECT_REF:
73 case ARRAY_REF:
74 case PARM_DECL:
75 case RESULT_DECL:
76 case ERROR_MARK:
77 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
78 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
79 return 1;
80 break;
81
82 /* A currently unresolved scope ref. */
83 case SCOPE_REF:
84 my_friendly_abort (103);
85 case OFFSET_REF:
86 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
87 return 1;
88 return real_lvalue_p (TREE_OPERAND (ref, 0))
89 && real_lvalue_p (TREE_OPERAND (ref, 1));
90 break;
91
92 case COND_EXPR:
93 return (real_lvalue_p (TREE_OPERAND (ref, 1))
94 && real_lvalue_p (TREE_OPERAND (ref, 2)));
95
96 case MODIFY_EXPR:
97 return 1;
98
99 case COMPOUND_EXPR:
100 return real_lvalue_p (TREE_OPERAND (ref, 1));
101
102 case MAX_EXPR:
103 case MIN_EXPR:
104 return (real_lvalue_p (TREE_OPERAND (ref, 0))
105 && real_lvalue_p (TREE_OPERAND (ref, 1)));
106 }
107
108 return 0;
109 }
110
111 int
112 lvalue_p (ref)
113 tree ref;
114 {
115 if (! language_lvalue_valid (ref))
116 return 0;
117
118 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
119 return 1;
120
121 if (ref == current_class_ptr && flag_this_is_variable <= 0)
122 return 0;
123
124 switch (TREE_CODE (ref))
125 {
126 /* preincrements and predecrements are valid lvals, provided
127 what they refer to are valid lvals. */
128 case PREINCREMENT_EXPR:
129 case PREDECREMENT_EXPR:
130 case COMPONENT_REF:
131 case SAVE_EXPR:
132 return lvalue_p (TREE_OPERAND (ref, 0));
133
134 case STRING_CST:
135 return 1;
136
137 case VAR_DECL:
138 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
139 && DECL_LANG_SPECIFIC (ref)
140 && DECL_IN_AGGR_P (ref))
141 return 0;
142 case INDIRECT_REF:
143 case ARRAY_REF:
144 case PARM_DECL:
145 case RESULT_DECL:
146 case ERROR_MARK:
147 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
148 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
149 return 1;
150 break;
151
152 case TARGET_EXPR:
153 return 1;
154
155 case CALL_EXPR:
156 if (IS_AGGR_TYPE (TREE_TYPE (ref)))
157 return 1;
158 break;
159
160 /* A currently unresolved scope ref. */
161 case SCOPE_REF:
162 my_friendly_abort (103);
163 case OFFSET_REF:
164 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
165 return 1;
166 return lvalue_p (TREE_OPERAND (ref, 0))
167 && lvalue_p (TREE_OPERAND (ref, 1));
168 break;
169
170 case COND_EXPR:
171 return (lvalue_p (TREE_OPERAND (ref, 1))
172 && lvalue_p (TREE_OPERAND (ref, 2)));
173
174 case MODIFY_EXPR:
175 return 1;
176
177 case COMPOUND_EXPR:
178 return lvalue_p (TREE_OPERAND (ref, 1));
179
180 case MAX_EXPR:
181 case MIN_EXPR:
182 return (lvalue_p (TREE_OPERAND (ref, 0))
183 && lvalue_p (TREE_OPERAND (ref, 1)));
184 }
185
186 return 0;
187 }
188
189 /* Return nonzero if REF is an lvalue valid for this language;
190 otherwise, print an error message and return zero. */
191
192 int
193 lvalue_or_else (ref, string)
194 tree ref;
195 char *string;
196 {
197 int win = lvalue_p (ref);
198 if (! win)
199 error ("non-lvalue in %s", string);
200 return win;
201 }
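/* Added sketch, not part of the original source: a hypothetical caller
   of lvalue_or_else.  Only lvalue_or_else itself is taken from this
   file; the function below is made up purely for illustration.  */
#if 0
static int
example_check_assignment (lhs)
     tree lhs;
{
  /* For something like `1 = x' this prints "non-lvalue in assignment"
     and returns 0; for `*p = x' or `a[i] = x' it returns 1 silently.  */
  return lvalue_or_else (lhs, "assignment");
}
#endif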
202
203 /* INIT is a CALL_EXPR which needs info about its target.
204 TYPE is the type that this initialization should appear to have.
205
206 Build an encapsulation of the initialization to perform
207 and return it so that it can be processed by language-independent
208 and language-specific expression expanders. */
209 tree
210 build_cplus_new (type, init)
211 tree type;
212 tree init;
213 {
214 tree slot;
215 tree rval;
216
217 slot = build (VAR_DECL, type);
218 layout_decl (slot, 0);
219 rval = build (NEW_EXPR, type,
220 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
221 TREE_SIDE_EFFECTS (rval) = 1;
222 TREE_ADDRESSABLE (rval) = 1;
223 rval = build (TARGET_EXPR, type, slot, rval, NULL_TREE);
224 TREE_SIDE_EFFECTS (rval) = 1;
225 TREE_ADDRESSABLE (rval) = 1;
226
227 return rval;
228 }
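/* Added note, not part of the original file: for a call returning a
   class that needs a destructor, say `X f ();', the value built here
   has the shape

       TARGET_EXPR <slot, NEW_EXPR <f, args, slot>>

   where SLOT is the anonymous VAR_DECL laid out above.  Both nodes are
   marked TREE_SIDE_EFFECTS and TREE_ADDRESSABLE so later passes neither
   discard the call nor try to re-evaluate it.  */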
229
230 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
231 these CALL_EXPRs with tree nodes that will perform the cleanups. */
232
233 tree
234 break_out_cleanups (exp)
235 tree exp;
236 {
237 tree tmp = exp;
238
239 if (TREE_CODE (tmp) == CALL_EXPR
240 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
241 return build_cplus_new (TREE_TYPE (tmp), tmp);
242
243 while (TREE_CODE (tmp) == NOP_EXPR
244 || TREE_CODE (tmp) == CONVERT_EXPR
245 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
246 {
247 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
248 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
249 {
250 TREE_OPERAND (tmp, 0)
251 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
252 TREE_OPERAND (tmp, 0));
253 break;
254 }
255 else
256 tmp = TREE_OPERAND (tmp, 0);
257 }
258 return exp;
259 }
260
261 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
262 copies where they are found. Returns a deep copy of all nodes transitively
263 containing CALL_EXPRs. */
264
265 tree
266 break_out_calls (exp)
267 tree exp;
268 {
269 register tree t1, t2;
270 register enum tree_code code;
271 register int changed = 0;
272 register int i;
273
274 if (exp == NULL_TREE)
275 return exp;
276
277 code = TREE_CODE (exp);
278
279 if (code == CALL_EXPR)
280 return copy_node (exp);
281
282 /* Don't try to defeat a save_expr, as it should only be done once. */
283 if (code == SAVE_EXPR)
284 return exp;
285
286 switch (TREE_CODE_CLASS (code))
287 {
288 default:
289 abort ();
290
291 case 'c': /* a constant */
292 case 't': /* a type node */
293 case 'x': /* something random, like an identifier or an ERROR_MARK. */
294 return exp;
295
296 case 'd': /* A decl node */
297 #if 0 /* This is bogus. jason 9/21/94 */
298
299 t1 = break_out_calls (DECL_INITIAL (exp));
300 if (t1 != DECL_INITIAL (exp))
301 {
302 exp = copy_node (exp);
303 DECL_INITIAL (exp) = t1;
304 }
305 #endif
306 return exp;
307
308 case 'b': /* A block node */
309 {
310 /* Don't know how to handle these correctly yet. Must do a
311 break_out_calls on all DECL_INITIAL values for local variables,
312 and also break_out_calls on all sub-blocks and sub-statements. */
313 abort ();
314 }
315 return exp;
316
317 case 'e': /* an expression */
318 case 'r': /* a reference */
319 case 's': /* an expression with side effects */
320 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
321 {
322 t1 = break_out_calls (TREE_OPERAND (exp, i));
323 if (t1 != TREE_OPERAND (exp, i))
324 {
325 exp = copy_node (exp);
326 TREE_OPERAND (exp, i) = t1;
327 }
328 }
329 return exp;
330
331 case '<': /* a comparison expression */
332 case '2': /* a binary arithmetic expression */
333 t2 = break_out_calls (TREE_OPERAND (exp, 1));
334 if (t2 != TREE_OPERAND (exp, 1))
335 changed = 1;
336 case '1': /* a unary arithmetic expression */
337 t1 = break_out_calls (TREE_OPERAND (exp, 0));
338 if (t1 != TREE_OPERAND (exp, 0))
339 changed = 1;
340 if (changed)
341 {
342 if (tree_code_length[(int) code] == 1)
343 return build1 (code, TREE_TYPE (exp), t1);
344 else
345 return build (code, TREE_TYPE (exp), t1, t2);
346 }
347 return exp;
348 }
349
350 }
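/* Added illustration, not part of the original source: for a tree like
   `a + f (b)', the CALL_EXPR for `f (b)' is copied, and because one of
   its operands changed the enclosing PLUS_EXPR is copied as well, while
   `a' and `b' themselves stay shared with the original tree.  Subtrees
   containing no CALL_EXPR are never duplicated.  */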
351 \f
352 extern struct obstack *current_obstack;
353 extern struct obstack permanent_obstack, class_obstack;
354 extern struct obstack *saveable_obstack;
355
356 /* Here is how primitive or already-canonicalized types' hash
357 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
358 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
359
360 /* Construct, lay out and return the type of methods belonging to class
361 BASETYPE and whose arguments are described by ARGTYPES and whose values
362 are described by RETTYPE. If each type exists already, reuse it. */
363 tree
364 build_cplus_method_type (basetype, rettype, argtypes)
365 tree basetype, rettype, argtypes;
366 {
367 register tree t;
368 tree ptype;
369 int hashcode;
370
371 /* Make a node of the sort we want. */
372 t = make_node (METHOD_TYPE);
373
374 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
375 TREE_TYPE (t) = rettype;
376 if (IS_SIGNATURE (basetype))
377 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
378 TYPE_READONLY (basetype),
379 TYPE_VOLATILE (basetype));
380 else
381 ptype = build_pointer_type (basetype);
382
383 /* The actual arglist for this function includes a "hidden" argument
384 which is "this". Put it into the list of argument types. */
385
386 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
387 TYPE_ARG_TYPES (t) = argtypes;
388 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
389
390 /* If we already have such a type, use the old one and free this one.
391 Note that it also frees up the above cons cell if found. */
392 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
393 t = type_hash_canon (hashcode, t);
394
395 if (TYPE_SIZE (t) == 0)
396 layout_type (t);
397
398 return t;
399 }
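/* Added illustration, not part of the original file: for a member
   function `int A::f (char)', BASETYPE is `A', RETTYPE is `int', and
   ARGTYPES is the list (char, void).  The METHOD_TYPE built above ends
   up with the argument list (A *, char, void): the implicit `this'
   pointer is consed onto the front, and that first cons cell is the one
   flagged with TREE_SIDE_EFFECTS as "artificial".  */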
400
401 tree
402 build_cplus_array_type (elt_type, index_type)
403 tree elt_type;
404 tree index_type;
405 {
406 register struct obstack *ambient_obstack = current_obstack;
407 register struct obstack *ambient_saveable_obstack = saveable_obstack;
408 tree t;
409
410 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
411 make this permanent too. */
412 if (TREE_PERMANENT (elt_type)
413 && (index_type == 0 || TREE_PERMANENT (index_type)))
414 {
415 current_obstack = &permanent_obstack;
416 saveable_obstack = &permanent_obstack;
417 }
418
419 if (current_template_parms)
420 {
421 t = make_node (ARRAY_TYPE);
422 TREE_TYPE (t) = elt_type;
423 TYPE_DOMAIN (t) = index_type;
424 }
425 else
426 t = build_array_type (elt_type, index_type);
427
428 /* Push these needs up so that initialization takes place
429 more easily. */
430 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
431 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
432 current_obstack = ambient_obstack;
433 saveable_obstack = ambient_saveable_obstack;
434 return t;
435 }
436 \f
437 /* Make a variant type in the proper way for C/C++, propagating qualifiers
438 down to the element type of an array. */
439
440 tree
441 cp_build_type_variant (type, constp, volatilep)
442 tree type;
443 int constp, volatilep;
444 {
445 if (type == error_mark_node)
446 return type;
447
448 if (TREE_CODE (type) == ARRAY_TYPE)
449 {
450 tree real_main_variant = TYPE_MAIN_VARIANT (type);
451
452 push_obstacks (TYPE_OBSTACK (real_main_variant),
453 TYPE_OBSTACK (real_main_variant));
454 type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
455 constp, volatilep),
456 TYPE_DOMAIN (type));
457
458 /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
459 make a copy. (TYPE might have come from the hash table and
460 REAL_MAIN_VARIANT might be in some function's obstack.) */
461
462 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
463 {
464 type = copy_node (type);
465 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
466 }
467
468 TYPE_MAIN_VARIANT (type) = real_main_variant;
469 pop_obstacks ();
470 }
471 return build_type_variant (type, constp, volatilep);
472 }
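/* Added illustration, not part of the original source: given
   `typedef char buf[16];', a request for `const buf' does not simply
   mark the ARRAY_TYPE read-only.  The recursion above rebuilds the
   array with a `const char' element type, so the variant behaves as an
   array of 16 const chars, which is what the C/C++ rules require.  */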
473 \f
474 /* Add OFFSET to all base types of T.
475
476 OFFSET, which is a type offset, is the number of bytes.
477
478 Note that we don't have to worry about having two paths to the
479 same base type, since this type owns its association list. */
480 void
481 propagate_binfo_offsets (binfo, offset)
482 tree binfo;
483 tree offset;
484 {
485 tree binfos = BINFO_BASETYPES (binfo);
486 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
487
488 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
489 {
490 tree base_binfo = TREE_VEC_ELT (binfos, i);
491
492 if (TREE_VIA_VIRTUAL (base_binfo))
493 i += 1;
494 else
495 {
496 int j;
497 tree base_binfos = BINFO_BASETYPES (base_binfo);
498 tree delta;
499
500 for (j = i+1; j < n_baselinks; j++)
501 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
502 {
503 /* The next basetype offset must take into account the space
504 between the classes, not just the size of each class. */
505 delta = size_binop (MINUS_EXPR,
506 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
507 BINFO_OFFSET (base_binfo));
508 break;
509 }
510
511 #if 0
512 if (BINFO_OFFSET_ZEROP (base_binfo))
513 BINFO_OFFSET (base_binfo) = offset;
514 else
515 BINFO_OFFSET (base_binfo)
516 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
517 #else
518 BINFO_OFFSET (base_binfo) = offset;
519 #endif
520 if (base_binfos)
521 {
522 int k;
523 tree chain = NULL_TREE;
524
525 /* Now unshare the structure beneath BASE_BINFO. */
526 for (k = TREE_VEC_LENGTH (base_binfos)-1;
527 k >= 0; k--)
528 {
529 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
530 if (! TREE_VIA_VIRTUAL (base_base_binfo))
531 TREE_VEC_ELT (base_binfos, k)
532 = make_binfo (BINFO_OFFSET (base_base_binfo),
533 base_base_binfo,
534 BINFO_VTABLE (base_base_binfo),
535 BINFO_VIRTUALS (base_base_binfo),
536 chain);
537 chain = TREE_VEC_ELT (base_binfos, k);
538 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
539 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
540 BINFO_INHERITANCE_CHAIN (chain) = base_binfo;
541 }
542 /* Now propagate the offset to the base types. */
543 propagate_binfo_offsets (base_binfo, offset);
544 }
545
546 /* Go to our next class that counts for offset propagation. */
547 i = j;
548 if (i < n_baselinks)
549 offset = size_binop (PLUS_EXPR, offset, delta);
550 }
551 }
552 }
553
554 /* Compute the actual offsets that our virtual base classes
555 will have *for this type*. This must be performed after
556 the fields are laid out, since virtual baseclasses must
557 be laid out at the end of the record.
558
559 Returns the maximum number of virtual functions any of the virtual
560 baseclasses provide. */
561 int
562 layout_vbasetypes (rec, max)
563 tree rec;
564 int max;
565 {
566 /* Get all the virtual base types that this type uses.
567 The TREE_VALUE slot holds the virtual baseclass type. */
568 tree vbase_types = get_vbase_types (rec);
569
570 #ifdef STRUCTURE_SIZE_BOUNDARY
571 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
572 #else
573 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
574 #endif
575 int desired_align;
576
577 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
578 where CONST_SIZE is an integer
579 and VAR_SIZE is a tree expression.
580 If VAR_SIZE is null, the size is just CONST_SIZE.
581 Naturally we try to avoid using VAR_SIZE. */
582 register unsigned const_size = 0;
583 register tree var_size = 0;
584 int nonvirtual_const_size;
585
586 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
587
588 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
589 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
590 else
591 var_size = TYPE_SIZE (rec);
592
593 nonvirtual_const_size = const_size;
594
595 while (vbase_types)
596 {
597 tree basetype = BINFO_TYPE (vbase_types);
598 tree offset;
599
600 desired_align = TYPE_ALIGN (basetype);
601 record_align = MAX (record_align, desired_align);
602
603 if (const_size == 0)
604 offset = integer_zero_node;
605 else
606 {
607 /* Give each virtual base type the alignment it wants. */
608 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
609 * TYPE_ALIGN (basetype);
610 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
611 }
612
613 if (CLASSTYPE_VSIZE (basetype) > max)
614 max = CLASSTYPE_VSIZE (basetype);
615 BINFO_OFFSET (vbase_types) = offset;
616
617 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
618 {
619 /* Every virtual baseclass takes at least a UNIT, so that we can
620 take its address and get something different for each base. */
621 const_size += MAX (BITS_PER_UNIT,
622 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
623 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
624 }
625 else if (var_size == 0)
626 var_size = TYPE_SIZE (basetype);
627 else
628 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
629
630 vbase_types = TREE_CHAIN (vbase_types);
631 }
632
633 if (const_size)
634 {
635 /* Because a virtual base might take a single byte above,
636 we have to re-adjust the total size to make sure it is
637 a multiple of the alignment. */
638 /* Give the whole object the alignment it wants. */
639 const_size = CEIL (const_size, record_align) * record_align;
640 }
641
642 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
643 here, as that is for this class, without any virtual base classes. */
644 TYPE_ALIGN (rec) = record_align;
645 if (const_size != nonvirtual_const_size)
646 {
647 CLASSTYPE_VBASE_SIZE (rec)
648 = size_int (const_size - nonvirtual_const_size);
649 TYPE_SIZE (rec) = size_int (const_size);
650 }
651
652 /* Now propagate offset information throughout the lattice
653 under the vbase type. */
654 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
655 vbase_types = TREE_CHAIN (vbase_types))
656 {
657 tree base_binfos = BINFO_BASETYPES (vbase_types);
658
659 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
660
661 if (base_binfos)
662 {
663 tree chain = NULL_TREE;
664 int j;
665 /* Now unshare the structure beneath BASE_BINFO. */
666
667 for (j = TREE_VEC_LENGTH (base_binfos)-1;
668 j >= 0; j--)
669 {
670 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
671 if (! TREE_VIA_VIRTUAL (base_base_binfo))
672 TREE_VEC_ELT (base_binfos, j)
673 = make_binfo (BINFO_OFFSET (base_base_binfo),
674 base_base_binfo,
675 BINFO_VTABLE (base_base_binfo),
676 BINFO_VIRTUALS (base_base_binfo),
677 chain);
678 chain = TREE_VEC_ELT (base_binfos, j);
679 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
680 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
681 BINFO_INHERITANCE_CHAIN (chain) = vbase_types;
682 }
683
684 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
685 }
686 }
687
688 return max;
689 }
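/* Added worked example with hypothetical numbers, not part of the
   original source: suppose REC has 12 bytes (96 bits) of non-virtual
   data and one virtual base of 8 bytes with 32-bit alignment and no
   virtual bases of its own.  The loop rounds const_size up to a
   multiple of 32 (it already is), records byte offset 96/8 == 12 in
   BINFO_OFFSET, then adds the base's 64 bits, giving 160 bits before
   the final rounding to record_align.  CLASSTYPE_VBASE_SIZE (rec) then
   records 160 - 96 == 64 bits as the part contributed by virtual
   bases.  */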
690
691 /* Lay out the base types of a record type, REC.
692 Tentatively set the size and alignment of REC
693 according to the base types alone.
694
695 Offsets for immediate nonvirtual baseclasses are also computed here.
696
697 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
698 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
699
700 Returns list of virtual base classes in a FIELD_DECL chain. */
701 tree
702 layout_basetypes (rec, binfos)
703 tree rec, binfos;
704 {
705 /* Chain to hold all the new FIELD_DECLs which point at virtual
706 base classes. */
707 tree vbase_decls = NULL_TREE;
708
709 #ifdef STRUCTURE_SIZE_BOUNDARY
710 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
711 #else
712 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
713 #endif
714
715 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
716 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
717 the size is just CONST_SIZE. Naturally we try to avoid using
718 VAR_SIZE. And so far, we've been successful. */
719 #if 0
720 register tree var_size = 0;
721 #endif
722
723 register unsigned const_size = 0;
724 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
725
726 /* Handle basetypes almost like fields, but record their
727 offsets differently. */
728
729 for (i = 0; i < n_baseclasses; i++)
730 {
731 int inc, desired_align, int_vbase_size;
732 register tree base_binfo = TREE_VEC_ELT (binfos, i);
733 register tree basetype = BINFO_TYPE (base_binfo);
734 tree decl, offset;
735
736 if (TYPE_SIZE (basetype) == 0)
737 {
738 #if 0
739 /* This error is now reported in xref_tag, thus giving better
740 location information. */
741 error_with_aggr_type (base_binfo,
742 "base class `%s' has incomplete type");
743
744 TREE_VIA_PUBLIC (base_binfo) = 1;
745 TREE_VIA_PROTECTED (base_binfo) = 0;
746 TREE_VIA_VIRTUAL (base_binfo) = 0;
747
748 /* Should handle this better so that
749
750 class A;
751 class B: private A { virtual void F(); };
752
753 does not dump core when compiled. */
754 my_friendly_abort (121);
755 #endif
756 continue;
757 }
758
759 /* All basetypes are recorded in the association list of the
760 derived type. */
761
762 if (TREE_VIA_VIRTUAL (base_binfo))
763 {
764 int j;
765 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
766 + sizeof (VBASE_NAME) + 1);
767
768 /* The offset for a virtual base class is only used in computing
769 virtual function tables and for initializing virtual base
770 pointers. It is built once `get_vbase_types' is called. */
771
772 /* If this basetype can come from another vbase pointer
773 without an additional indirection, we will share
774 that pointer. If an indirection is involved, we
775 make our own pointer. */
776 for (j = 0; j < n_baseclasses; j++)
777 {
778 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
779 if (! TREE_VIA_VIRTUAL (other_base_binfo)
780 && binfo_member (basetype,
781 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
782 goto got_it;
783 }
784 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
785 decl = build_lang_field_decl (FIELD_DECL, get_identifier (name),
786 build_pointer_type (basetype));
787 /* If you change any of the below, take a look at all the
788 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
789 them too. */
790 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
791 DECL_VIRTUAL_P (decl) = 1;
792 DECL_FIELD_CONTEXT (decl) = rec;
793 DECL_CLASS_CONTEXT (decl) = rec;
794 DECL_FCONTEXT (decl) = basetype;
795 DECL_SAVED_INSNS (decl) = NULL_RTX;
796 DECL_FIELD_SIZE (decl) = 0;
797 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
798 TREE_CHAIN (decl) = vbase_decls;
799 BINFO_VPTR_FIELD (base_binfo) = decl;
800 vbase_decls = decl;
801
802 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
803 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1)) == NULL_TREE)
804 {
805 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1),
806 "destructor `%s' non-virtual");
807 warning ("in inheritance relationship `%s: virtual %s'",
808 TYPE_NAME_STRING (rec),
809 TYPE_NAME_STRING (basetype));
810 }
811 got_it:
812 /* The space this decl occupies has already been accounted for. */
813 continue;
814 }
815
816 if (const_size == 0)
817 offset = integer_zero_node;
818 else
819 {
820 /* Give each base type the alignment it wants. */
821 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
822 * TYPE_ALIGN (basetype);
823 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
824
825 #if 0
826 /* bpk: Disabled this check until someone is willing to
827 claim it as theirs and explain exactly what circumstances
828 warrant the warning. */
829 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
830 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1)) == NULL_TREE)
831 {
832 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1),
833 "destructor `%s' non-virtual");
834 warning ("in inheritance relationship `%s:%s %s'",
835 TYPE_NAME_STRING (rec),
836 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
837 TYPE_NAME_STRING (basetype));
838 }
839 #endif
840 }
841 BINFO_OFFSET (base_binfo) = offset;
842 if (CLASSTYPE_VSIZE (basetype))
843 {
844 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
845 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
846 }
847 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
848 TYPE_BINFO (rec) = base_binfo;
849
850 /* Add only the amount of storage not present in
851 the virtual baseclasses. */
852
853 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
854 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
855 {
856 inc = MAX (record_align,
857 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
858 - int_vbase_size));
859
860 /* Record must have at least as much alignment as any field. */
861 desired_align = TYPE_ALIGN (basetype);
862 record_align = MAX (record_align, desired_align);
863
864 const_size += inc;
865 }
866 }
867
868 if (const_size)
869 CLASSTYPE_SIZE (rec) = size_int (const_size);
870 else
871 CLASSTYPE_SIZE (rec) = integer_zero_node;
872 CLASSTYPE_ALIGN (rec) = record_align;
873
874 return vbase_decls;
875 }
876 \f
877 /* Hashing of lists so that we don't make duplicates.
878 The entry point is `list_hash_canon'. */
879
880 /* Each hash table slot is a bucket containing a chain
881 of these structures. */
882
883 struct list_hash
884 {
885 struct list_hash *next; /* Next structure in the bucket. */
886 int hashcode; /* Hash code of this list. */
887 tree list; /* The list recorded here. */
888 };
889
890 /* Now here is the hash table. When recording a list, it is added
891 to the slot whose index is the hash code mod the table size.
892 Note that the hash table is used for several kinds of lists.
893 While all these live in the same table, they are completely independent,
894 and the hash code is computed differently for each of these. */
895
896 #define TYPE_HASH_SIZE 59
897 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
898
899 /* Compute a hash code for a list (chain of TREE_LIST nodes
900 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
901 TREE_COMMON slots), by adding the hash codes of the individual entries. */
902
903 int
904 list_hash (list)
905 tree list;
906 {
907 register int hashcode = 0;
908
909 if (TREE_CHAIN (list))
910 hashcode += TYPE_HASH (TREE_CHAIN (list));
911
912 if (TREE_VALUE (list))
913 hashcode += TYPE_HASH (TREE_VALUE (list));
914 else
915 hashcode += 1007;
916 if (TREE_PURPOSE (list))
917 hashcode += TYPE_HASH (TREE_PURPOSE (list));
918 else
919 hashcode += 1009;
920 return hashcode;
921 }
922
923 /* Look in the list hash table for a list isomorphic to LIST.
924 If one is found, return it. Otherwise return 0. */
925
926 tree
927 list_hash_lookup (hashcode, list)
928 int hashcode;
929 tree list;
930 {
931 register struct list_hash *h;
932 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
933 if (h->hashcode == hashcode
934 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
935 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
936 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
937 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
938 && TREE_VALUE (h->list) == TREE_VALUE (list)
939 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
940 {
941 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
942 return h->list;
943 }
944 return 0;
945 }
946
947 /* Add an entry to the list-hash-table
948 for a list LIST whose hash code is HASHCODE. */
949
950 void
951 list_hash_add (hashcode, list)
952 int hashcode;
953 tree list;
954 {
955 register struct list_hash *h;
956
957 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
958 h->hashcode = hashcode;
959 h->list = list;
960 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
961 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
962 }
963
964 /* Given LIST, and HASHCODE its hash code, return the canonical
965 object for an identical list if one already exists.
966 Otherwise, return LIST, and record it as the canonical object
967 if it is a permanent object.
968
969 To use this function, first create a list of the sort you want.
970 Then compute its hash code from the fields of the list that
971 make it different from other similar lists.
972 Then call this function and use the value.
973 This function frees the list you pass in if it is a duplicate. */
974
975 /* Set to 1 to debug without canonicalization. Never set by program. */
976 static int debug_no_list_hash = 0;
977
978 tree
979 list_hash_canon (hashcode, list)
980 int hashcode;
981 tree list;
982 {
983 tree t1;
984
985 if (debug_no_list_hash)
986 return list;
987
988 t1 = list_hash_lookup (hashcode, list);
989 if (t1 != 0)
990 {
991 obstack_free (&class_obstack, list);
992 return t1;
993 }
994
995 /* If this is a new list, record it for later reuse. */
996 list_hash_add (hashcode, list);
997
998 return list;
999 }
1000
1001 tree
1002 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
1003 int via_public, via_virtual, via_protected;
1004 tree purpose, value, chain;
1005 {
1006 struct obstack *ambient_obstack = current_obstack;
1007 tree t;
1008 int hashcode;
1009
1010 current_obstack = &class_obstack;
1011 t = tree_cons (purpose, value, chain);
1012 TREE_VIA_PUBLIC (t) = via_public;
1013 TREE_VIA_PROTECTED (t) = via_protected;
1014 TREE_VIA_VIRTUAL (t) = via_virtual;
1015 hashcode = list_hash (t);
1016 t = list_hash_canon (hashcode, t);
1017 current_obstack = ambient_obstack;
1018 return t;
1019 }
1020
1021 /* Constructor for hashed lists. */
1022 tree
1023 hash_tree_chain (value, chain)
1024 tree value, chain;
1025 {
1026 struct obstack *ambient_obstack = current_obstack;
1027 tree t;
1028 int hashcode;
1029
1030 current_obstack = &class_obstack;
1031 t = tree_cons (NULL_TREE, value, chain);
1032 hashcode = list_hash (t);
1033 t = list_hash_canon (hashcode, t);
1034 current_obstack = ambient_obstack;
1035 return t;
1036 }
1037
1038 /* Similar, but used for concatenating two lists. */
1039 tree
1040 hash_chainon (list1, list2)
1041 tree list1, list2;
1042 {
1043 if (list2 == 0)
1044 return list1;
1045 if (list1 == 0)
1046 return list2;
1047 if (TREE_CHAIN (list1) == NULL_TREE)
1048 return hash_tree_chain (TREE_VALUE (list1), list2);
1049 return hash_tree_chain (TREE_VALUE (list1),
1050 hash_chainon (TREE_CHAIN (list1), list2));
1051 }
1052
1053 static tree
1054 get_identifier_list (value)
1055 tree value;
1056 {
1057 tree list = IDENTIFIER_AS_LIST (value);
1058 if (list != NULL_TREE
1059 && (TREE_CODE (list) != TREE_LIST
1060 || TREE_VALUE (list) != value))
1061 list = NULL_TREE;
1062 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
1063 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
1064 && IDENTIFIER_TYPE_VALUE (value)
1065 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
1066 {
1067 tree type = IDENTIFIER_TYPE_VALUE (value);
1068
1069 if (TYPE_PTRMEMFUNC_P (type))
1070 list = NULL_TREE;
1071 else if (type == current_class_type)
1072 /* Don't mess up the constructor name. */
1073 list = tree_cons (NULL_TREE, value, NULL_TREE);
1074 else
1075 {
1076 if (! CLASSTYPE_ID_AS_LIST (type))
1077 CLASSTYPE_ID_AS_LIST (type)
1078 = perm_tree_cons (NULL_TREE, TYPE_IDENTIFIER (type), NULL_TREE);
1079 list = CLASSTYPE_ID_AS_LIST (type);
1080 }
1081 }
1082 return list;
1083 }
1084
1085 tree
1086 get_decl_list (value)
1087 tree value;
1088 {
1089 tree list = NULL_TREE;
1090
1091 if (TREE_CODE (value) == IDENTIFIER_NODE)
1092 list = get_identifier_list (value);
1093 else if (TREE_CODE (value) == RECORD_TYPE
1094 && TYPE_LANG_SPECIFIC (value)
1095 && value == TYPE_MAIN_VARIANT (value))
1096 list = CLASSTYPE_AS_LIST (value);
1097
1098 if (list != NULL_TREE)
1099 {
1100 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1101 return list;
1102 }
1103
1104 return build_decl_list (NULL_TREE, value);
1105 }
1106 \f
1107 /* Build an association between TYPE and some parameters:
1108
1109 OFFSET is the offset added to `this' to convert it to a pointer
1110 of type `TYPE *'
1111
1112 BINFO is the base binfo to use, if we are deriving from one. This
1113 is necessary, as we want specialized parent binfos from base
1114 classes, so that the VTABLE_NAMEs of bases are for the most derived
1115 type, instead of the simple type.
1116
1117 VTABLE is the virtual function table with which to initialize
1118 sub-objects of type TYPE.
1119
1120 VIRTUALS are the virtual functions sitting in VTABLE.
1121
1122 CHAIN are more associations we must retain. */
1123
1124 tree
1125 make_binfo (offset, binfo, vtable, virtuals, chain)
1126 tree offset, binfo;
1127 tree vtable, virtuals;
1128 tree chain;
1129 {
1130 tree new_binfo = make_tree_vec (6);
1131 tree type;
1132
1133 if (TREE_CODE (binfo) == TREE_VEC)
1134 type = BINFO_TYPE (binfo);
1135 else
1136 {
1137 type = binfo;
1138 binfo = TYPE_BINFO (binfo);
1139 }
1140
1141 TREE_CHAIN (new_binfo) = chain;
1142 if (chain)
1143 TREE_USED (new_binfo) = TREE_USED (chain);
1144
1145 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1146 BINFO_OFFSET (new_binfo) = offset;
1147 BINFO_VTABLE (new_binfo) = vtable;
1148 BINFO_VIRTUALS (new_binfo) = virtuals;
1149 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1150
1151 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1152 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1153 return new_binfo;
1154 }
1155
1156 /* Return the binfo value for ELEM in TYPE. */
1157
1158 tree
1159 binfo_value (elem, type)
1160 tree elem;
1161 tree type;
1162 {
1163 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1164 compiler_error ("base class `%s' ambiguous in binfo_value",
1165 TYPE_NAME_STRING (elem));
1166 if (elem == type)
1167 return TYPE_BINFO (type);
1168 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1169 return type;
1170 return get_binfo (elem, type, 0);
1171 }
1172
1173 tree
1174 reverse_path (path)
1175 tree path;
1176 {
1177 register tree prev = 0, tmp, next;
1178 for (tmp = path; tmp; tmp = next)
1179 {
1180 next = BINFO_INHERITANCE_CHAIN (tmp);
1181 BINFO_INHERITANCE_CHAIN (tmp) = prev;
1182 prev = tmp;
1183 }
1184 return prev;
1185 }
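/* Added sketch, not part of the original source: reverse_path is the
   usual in-place pointer-reversal loop, applied to the
   BINFO_INHERITANCE_CHAIN links.  The same shape on an ordinary
   singly-linked list, for comparison (illustrative code only):  */
#if 0
struct example_node { struct example_node *next; };

static struct example_node *
example_reverse (head)
     struct example_node *head;
{
  struct example_node *prev = 0, *next;
  for (; head; head = next)
    {
      next = head->next;   /* remember the rest of the list */
      head->next = prev;   /* point this node at the part already reversed */
      prev = head;         /* this node is the new front */
    }
  return prev;
}
#endif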
1186
1187 void
1188 debug_binfo (elem)
1189 tree elem;
1190 {
1191 unsigned HOST_WIDE_INT n;
1192 tree virtuals;
1193
1194 fprintf (stderr, "type \"%s\"; offset = %d\n",
1195 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1196 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1197 fprintf (stderr, "vtable type:\n");
1198 debug_tree (BINFO_TYPE (elem));
1199 if (BINFO_VTABLE (elem))
1200 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1201 else
1202 fprintf (stderr, "no vtable decl yet\n");
1203 fprintf (stderr, "virtuals:\n");
1204 virtuals = BINFO_VIRTUALS (elem);
1205
1206 n = skip_rtti_stuff (&virtuals);
1207
1208 while (virtuals)
1209 {
1210 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1211 fprintf (stderr, "%s [%d =? %d]\n",
1212 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1213 n, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1214 ++n;
1215 virtuals = TREE_CHAIN (virtuals);
1216 }
1217 }
1218
1219 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1220 We expect a null pointer to mark the end of the chain.
1221 This is the Lisp primitive `length'. */
1222
1223 int
1224 decl_list_length (t)
1225 tree t;
1226 {
1227 register tree tail;
1228 register int len = 0;
1229
1230 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1231 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1232 for (tail = t; tail; tail = DECL_CHAIN (tail))
1233 len++;
1234
1235 return len;
1236 }
1237
1238 int
1239 count_functions (t)
1240 tree t;
1241 {
1242 if (TREE_CODE (t) == FUNCTION_DECL)
1243 return 1;
1244 else if (TREE_CODE (t) == TREE_LIST)
1245 return decl_list_length (TREE_VALUE (t));
1246
1247 my_friendly_abort (359);
1248 return 0;
1249 }
1250
1251 int
1252 is_overloaded_fn (x)
1253 tree x;
1254 {
1255 if (TREE_CODE (x) == FUNCTION_DECL)
1256 return 1;
1257
1258 if (TREE_CODE (x) == TREE_LIST
1259 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1260 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1261 return 1;
1262
1263 return 0;
1264 }
1265
1266 int
1267 really_overloaded_fn (x)
1268 tree x;
1269 {
1270 if (TREE_CODE (x) == TREE_LIST
1271 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1272 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
1273 return 1;
1274
1275 return 0;
1276 }
1277
1278 tree
1279 get_first_fn (from)
1280 tree from;
1281 {
1282 if (TREE_CODE (from) == FUNCTION_DECL)
1283 return from;
1284
1285 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1286
1287 return TREE_VALUE (from);
1288 }
1289
1290 tree
1291 fnaddr_from_vtable_entry (entry)
1292 tree entry;
1293 {
1294 if (flag_vtable_thunks)
1295 {
1296 tree func = entry;
1297 if (TREE_CODE (func) == ADDR_EXPR)
1298 func = TREE_OPERAND (func, 0);
1299 if (TREE_CODE (func) == THUNK_DECL)
1300 return DECL_INITIAL (func);
1301 else
1302 return entry;
1303 }
1304 else
1305 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
1306 }
1307
1308 tree
1309 function_arg_chain (t)
1310 tree t;
1311 {
1312 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
1313 }
1314
1315 int
1316 promotes_to_aggr_type (t, code)
1317 tree t;
1318 enum tree_code code;
1319 {
1320 if (TREE_CODE (t) == code)
1321 t = TREE_TYPE (t);
1322 return IS_AGGR_TYPE (t);
1323 }
1324
1325 int
1326 is_aggr_type_2 (t1, t2)
1327 tree t1, t2;
1328 {
1329 if (TREE_CODE (t1) != TREE_CODE (t2))
1330 return 0;
1331 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1332 }
1333
1334 /* Give message using types TYPE1 and TYPE2 as arguments.
1335 PFN is the function which will print the message;
1336 S is the format string for PFN to use. */
1337 void
1338 message_2_types (pfn, s, type1, type2)
1339 void (*pfn) ();
1340 char *s;
1341 tree type1, type2;
1342 {
1343 tree name1 = TYPE_NAME (type1);
1344 tree name2 = TYPE_NAME (type2);
1345 if (TREE_CODE (name1) == TYPE_DECL)
1346 name1 = DECL_NAME (name1);
1347 if (TREE_CODE (name2) == TYPE_DECL)
1348 name2 = DECL_NAME (name2);
1349 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
1350 }
1351 \f
1352 #define PRINT_RING_SIZE 4
1353
1354 char *
1355 lang_printable_name (decl)
1356 tree decl;
1357 {
1358 static tree decl_ring[PRINT_RING_SIZE];
1359 static char *print_ring[PRINT_RING_SIZE];
1360 static int ring_counter;
1361 int i;
1362
1363 /* Only cache functions. */
1364 if (TREE_CODE (decl) != FUNCTION_DECL
1365 || DECL_LANG_SPECIFIC (decl) == 0)
1366 return decl_as_string (decl, 1);
1367
1368 /* See if this print name is lying around. */
1369 for (i = 0; i < PRINT_RING_SIZE; i++)
1370 if (decl_ring[i] == decl)
1371 /* yes, so return it. */
1372 return print_ring[i];
1373
1374 if (++ring_counter == PRINT_RING_SIZE)
1375 ring_counter = 0;
1376
1377 if (current_function_decl != NULL_TREE)
1378 {
1379 if (decl_ring[ring_counter] == current_function_decl)
1380 ring_counter += 1;
1381 if (ring_counter == PRINT_RING_SIZE)
1382 ring_counter = 0;
1383 if (decl_ring[ring_counter] == current_function_decl)
1384 my_friendly_abort (106);
1385 }
1386
1387 if (print_ring[ring_counter])
1388 free (print_ring[ring_counter]);
1389
1390 {
1391 int print_ret_type_p
1392 = (!DECL_CONSTRUCTOR_P (decl)
1393 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1394
1395 char *name = (char *)decl_as_string (decl, print_ret_type_p);
1396 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1397 strcpy (print_ring[ring_counter], name);
1398 decl_ring[ring_counter] = decl;
1399 }
1400 return print_ring[ring_counter];
1401 }
1402 \f
1403 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
1404 listed in RAISES. */
1405 tree
1406 build_exception_variant (type, raises)
1407 tree type;
1408 tree raises;
1409 {
1410 tree v = TYPE_MAIN_VARIANT (type);
1411 int constp = TYPE_READONLY (type);
1412 int volatilep = TYPE_VOLATILE (type);
1413
1414 for (; v; v = TYPE_NEXT_VARIANT (v))
1415 {
1416 if (TYPE_READONLY (v) != constp
1417 || TYPE_VOLATILE (v) != volatilep)
1418 continue;
1419
1420 /* @@ This should do set equality, not exact match. */
1421 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
1422 /* List of exceptions raised matches previously found list.
1423
1424 @@ Nice to free up storage used in consing up the
1425 @@ list of exceptions raised. */
1426 return v;
1427 }
1428
1429 /* Need to build a new variant. */
1430 v = build_type_copy (type);
1431
1432 if (raises && ! TREE_PERMANENT (raises))
1433 {
1434 push_obstacks_nochange ();
1435 end_temporary_allocation ();
1436 raises = copy_list (raises);
1437 pop_obstacks ();
1438 }
1439
1440 TYPE_RAISES_EXCEPTIONS (v) = raises;
1441 return v;
1442 }
1443
1444 /* Subroutine of copy_to_permanent
1445
1446 Assuming T is a node built bottom-up, make it all exist on the
1447 permanent obstack, if it is not permanent already. */
1448
1449 tree
1450 mapcar (t, func)
1451 tree t;
1452 tree (*func)();
1453 {
1454 tree tmp;
1455
1456 if (t == NULL_TREE)
1457 return t;
1458
1459 if (tmp = func (t), tmp != NULL_TREE)
1460 return tmp;
1461
1462 switch (TREE_CODE (t))
1463 {
1464 case ERROR_MARK:
1465 return error_mark_node;
1466
1467 case VAR_DECL:
1468 case FUNCTION_DECL:
1469 case CONST_DECL:
1470 break;
1471
1472 case PARM_DECL:
1473 {
1474 tree chain = TREE_CHAIN (t);
1475 t = copy_node (t);
1476 TREE_CHAIN (t) = mapcar (chain, func);
1477 TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
1478 DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
1479 DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
1480 return t;
1481 }
1482
1483 case TREE_LIST:
1484 {
1485 tree chain = TREE_CHAIN (t);
1486 t = copy_node (t);
1487 TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
1488 TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
1489 TREE_CHAIN (t) = mapcar (chain, func);
1490 return t;
1491 }
1492
1493 case TREE_VEC:
1494 {
1495 int len = TREE_VEC_LENGTH (t);
1496
1497 t = copy_node (t);
1498 while (len--)
1499 TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
1500 return t;
1501 }
1502
1503 case INTEGER_CST:
1504 case REAL_CST:
1505 case STRING_CST:
1506 return copy_node (t);
1507
1508 case COND_EXPR:
1509 case TARGET_EXPR:
1510 case NEW_EXPR:
1511 t = copy_node (t);
1512 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1513 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1514 TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
1515 return t;
1516
1517 case SAVE_EXPR:
1518 t = copy_node (t);
1519 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1520 return t;
1521
1522 case MODIFY_EXPR:
1523 case PLUS_EXPR:
1524 case MINUS_EXPR:
1525 case MULT_EXPR:
1526 case TRUNC_DIV_EXPR:
1527 case TRUNC_MOD_EXPR:
1528 case MIN_EXPR:
1529 case MAX_EXPR:
1530 case LSHIFT_EXPR:
1531 case RSHIFT_EXPR:
1532 case BIT_IOR_EXPR:
1533 case BIT_XOR_EXPR:
1534 case BIT_AND_EXPR:
1535 case BIT_ANDTC_EXPR:
1536 case TRUTH_ANDIF_EXPR:
1537 case TRUTH_ORIF_EXPR:
1538 case LT_EXPR:
1539 case LE_EXPR:
1540 case GT_EXPR:
1541 case GE_EXPR:
1542 case EQ_EXPR:
1543 case NE_EXPR:
1544 case CEIL_DIV_EXPR:
1545 case FLOOR_DIV_EXPR:
1546 case ROUND_DIV_EXPR:
1547 case CEIL_MOD_EXPR:
1548 case FLOOR_MOD_EXPR:
1549 case ROUND_MOD_EXPR:
1550 case COMPOUND_EXPR:
1551 case PREDECREMENT_EXPR:
1552 case PREINCREMENT_EXPR:
1553 case POSTDECREMENT_EXPR:
1554 case POSTINCREMENT_EXPR:
1555 case CALL_EXPR:
1556 case ARRAY_REF:
1557 case SCOPE_REF:
1558 t = copy_node (t);
1559 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1560 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1561 return t;
1562
1563 case CONVERT_EXPR:
1564 case ADDR_EXPR:
1565 case INDIRECT_REF:
1566 case NEGATE_EXPR:
1567 case BIT_NOT_EXPR:
1568 case TRUTH_NOT_EXPR:
1569 case NOP_EXPR:
1570 case COMPONENT_REF:
1571 t = copy_node (t);
1572 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1573 return t;
1574
1575 case POINTER_TYPE:
1576 tmp = build_pointer_type (mapcar (TREE_TYPE (t), func));
1577 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1578 case REFERENCE_TYPE:
1579 tmp = build_reference_type (mapcar (TREE_TYPE (t), func));
1580 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1581 case FUNCTION_TYPE:
1582 tmp = build_function_type (mapcar (TREE_TYPE (t), func),
1583 mapcar (TYPE_ARG_TYPES (t), func));
1584 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1585 case ARRAY_TYPE:
1586 tmp = build_array_type (mapcar (TREE_TYPE (t), func),
1587 mapcar (TYPE_DOMAIN (t), func));
1588 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1589 case INTEGER_TYPE:
1590 tmp = build_index_type (mapcar (TYPE_MAX_VALUE (t), func));
1591 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1592 case OFFSET_TYPE:
1593 tmp = build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
1594 mapcar (TREE_TYPE (t), func));
1595 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1596 case METHOD_TYPE:
1597 tmp = build_cplus_method_type
1598 (mapcar (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (t))), func),
1599 mapcar (TREE_TYPE (t), func),
1600 mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func));
1601 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1602
1603 case RECORD_TYPE:
1604 if (TYPE_PTRMEMFUNC_P (t))
1605 return build_ptrmemfunc_type
1606 (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
1607 /* else fall through */
1608
1609 /* This list is incomplete, but should suffice for now.
1610 It is very important that `sorry' not call
1611 `report_error_function'. That could cause an infinite loop. */
1612 default:
1613 sorry ("initializer contains unrecognized tree code");
1614 return error_mark_node;
1615
1616 }
1617 my_friendly_abort (107);
1618 /* NOTREACHED */
1619 return NULL_TREE;
1620 }
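/* Added note, not part of the original source: FUNC acts as a filter
   for mapcar.  When it returns non-NULL for a node, that value is used
   as the entire result for that subtree; when it returns NULL_TREE,
   mapcar copies the node and recurses into its operands.  perm_manip
   below (used by copy_to_permanent) and bot_manip (used by
   break_out_target_exprs) are the two filters defined in this file.  */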
1621
1622 static tree
1623 perm_manip (t)
1624 tree t;
1625 {
1626 if (TREE_PERMANENT (t))
1627 return t;
1628 /* Support `void f () { extern int i; A<&i> a; }' */
1629 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == FUNCTION_DECL)
1630 && TREE_PUBLIC (t))
1631 return copy_node (t);
1632 return NULL_TREE;
1633 }
1634
1635 /* Assuming T is a node built bottom-up, make it all exist on the
1636 permanent obstack, if it is not permanent already. */
1637 tree
1638 copy_to_permanent (t)
1639 tree t;
1640 {
1641 register struct obstack *ambient_obstack = current_obstack;
1642 register struct obstack *ambient_saveable_obstack = saveable_obstack;
1643 int resume;
1644
1645 if (t == NULL_TREE || TREE_PERMANENT (t))
1646 return t;
1647
1648 saveable_obstack = &permanent_obstack;
1649 current_obstack = saveable_obstack;
1650 resume = suspend_momentary ();
1651
1652 t = mapcar (t, perm_manip);
1653
1654 resume_momentary (resume);
1655 current_obstack = ambient_obstack;
1656 saveable_obstack = ambient_saveable_obstack;
1657
1658 return t;
1659 }
1660
1661 #ifdef GATHER_STATISTICS
1662 extern int depth_reached;
1663 #endif
1664
1665 void
1666 print_lang_statistics ()
1667 {
1668 extern struct obstack maybepermanent_obstack, decl_obstack;
1669 print_obstack_statistics ("class_obstack", &class_obstack);
1670 print_obstack_statistics ("decl_obstack", &decl_obstack);
1671 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1672 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1673 print_search_statistics ();
1674 print_class_statistics ();
1675 #ifdef GATHER_STATISTICS
1676 fprintf (stderr, "maximum template instantiation depth reached: %d\n",
1677 depth_reached);
1678 #endif
1679 }
1680
1681 /* This is used by the `assert' macro. It is provided in libgcc.a,
1682 which `cc' doesn't know how to link. Note that the C++ front-end
1683 no longer actually uses the `assert' macro (instead, it calls
1684 my_friendly_assert). But all of the back-end files still need this. */
1685 void
1686 __eprintf (string, expression, line, filename)
1687 #ifdef __STDC__
1688 const char *string;
1689 const char *expression;
1690 unsigned line;
1691 const char *filename;
1692 #else
1693 char *string;
1694 char *expression;
1695 unsigned line;
1696 char *filename;
1697 #endif
1698 {
1699 fprintf (stderr, string, expression, line, filename);
1700 fflush (stderr);
1701 abort ();
1702 }
1703
1704 /* Return, as an INTEGER_CST node, the number of elements for
1705 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
1706
1707 tree
1708 array_type_nelts_top (type)
1709 tree type;
1710 {
1711 return fold (build (PLUS_EXPR, sizetype,
1712 array_type_nelts (type),
1713 integer_one_node));
1714 }
1715
1716 /* Return, as an INTEGER_CST node, the number of elements for
1717 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1718 ARRAY_TYPEs that are clumped together. */
1719
1720 tree
1721 array_type_nelts_total (type)
1722 tree type;
1723 {
1724 tree sz = array_type_nelts_top (type);
1725 type = TREE_TYPE (type);
1726 while (TREE_CODE (type) == ARRAY_TYPE)
1727 {
1728 tree n = array_type_nelts_top (type);
1729 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1730 type = TREE_TYPE (type);
1731 }
1732 return sz;
1733 }
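/* Added illustration, not part of the original source: for `int a[2][3]',
   array_type_nelts gives the top dimension's maximum index (1), so
   array_type_nelts_top yields 1 + 1 == 2, and array_type_nelts_total
   multiplies in the inner dimension to yield 2 * 3 == 6 elements.  */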
1734
1735 static
1736 tree
1737 bot_manip (t)
1738 tree t;
1739 {
1740 if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
1741 return t;
1742 else if (TREE_CODE (t) == TARGET_EXPR)
1743 return build_cplus_new (TREE_TYPE (t),
1744 break_out_target_exprs (TREE_OPERAND (t, 1)));
1745 return NULL_TREE;
1746 }
1747
1748 /* Actually, we'll just clean out the target exprs for the moment. */
1749 tree
1750 break_out_target_exprs (t)
1751 tree t;
1752 {
1753 return mapcar (t, bot_manip);
1754 }
1755
1756 /* Since CLEANUP may have SAVE_EXPRs in it, we protect it with an
1757 UNSAVE_EXPR as the backend cannot yet handle SAVE_EXPRs in cleanups
1758 by itself. */
1759 int
1760 cp_expand_decl_cleanup (decl, cleanup)
1761 tree decl, cleanup;
1762 {
1763 return expand_decl_cleanup (decl, unsave_expr (cleanup));
1764 }
1765
1766 /* Obstack used for allocating nodes in template function and variable
1767 definitions. */
1768
1769 extern struct obstack *expression_obstack;
1770
1771 /* Similar to `build_nt', except we build
1772 on the permanent_obstack, regardless. */
1773
1774 tree
1775 build_min_nt VPROTO((enum tree_code code, ...))
1776 {
1777 #ifndef __STDC__
1778 enum tree_code code;
1779 #endif
1780 register struct obstack *ambient_obstack = expression_obstack;
1781 va_list p;
1782 register tree t;
1783 register int length;
1784 register int i;
1785
1786 VA_START (p, code);
1787
1788 #ifndef __STDC__
1789 code = va_arg (p, enum tree_code);
1790 #endif
1791
1792 expression_obstack = &permanent_obstack;
1793
1794 t = make_node (code);
1795 length = tree_code_length[(int) code];
1796 TREE_COMPLEXITY (t) = lineno;
1797
1798 for (i = 0; i < length; i++)
1799 {
1800 tree x = va_arg (p, tree);
1801 TREE_OPERAND (t, i) = copy_to_permanent (x);
1802 }
1803
1804 va_end (p);
1805 expression_obstack = ambient_obstack;
1806 return t;
1807 }
1808
1809 /* Similar to `build', except we build
1810 on the permanent_obstack, regardless. */
1811
1812 tree
1813 build_min VPROTO((enum tree_code code, tree tt, ...))
1814 {
1815 #ifndef __STDC__
1816 enum tree_code code;
1817 tree tt;
1818 #endif
1819 register struct obstack *ambient_obstack = expression_obstack;
1820 va_list p;
1821 register tree t;
1822 register int length;
1823 register int i;
1824
1825 VA_START (p, tt);
1826
1827 #ifndef __STDC__
1828 code = va_arg (p, enum tree_code);
1829 tt = va_arg (p, tree);
1830 #endif
1831
1832 expression_obstack = &permanent_obstack;
1833
1834 t = make_node (code);
1835 length = tree_code_length[(int) code];
1836 TREE_TYPE (t) = tt;
1837 TREE_COMPLEXITY (t) = lineno;
1838
1839 for (i = 0; i < length; i++)
1840 {
1841 tree x = va_arg (p, tree);
1842 TREE_OPERAND (t, i) = copy_to_permanent (x);
1843 }
1844
1845 va_end (p);
1846 expression_obstack = ambient_obstack;
1847 return t;
1848 }
1849
1850 /* Same as `tree_cons' but make a permanent object. */
1851
1852 tree
1853 min_tree_cons (purpose, value, chain)
1854 tree purpose, value, chain;
1855 {
1856 register tree node;
1857 register struct obstack *ambient_obstack = current_obstack;
1858 current_obstack = &permanent_obstack;
1859
1860 node = tree_cons (copy_to_permanent (purpose),
1861 copy_to_permanent (value), chain);
1862 current_obstack = ambient_obstack;
1863 return node;
1864 }
1865
1866 tree
1867 get_type_decl (t)
1868 tree t;
1869 {
1870 if (TREE_CODE (t) == IDENTIFIER_NODE)
1871 return identifier_typedecl_value (t);
1872 if (TREE_CODE (t) == TYPE_DECL)
1873 return t;
1874 if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
1875 return TYPE_STUB_DECL (t);
1876
1877 my_friendly_abort (42);
1878 }
1879
1880 int
1881 can_free (obstack, t)
1882 struct obstack *obstack;
1883 tree t;
1884 {
1885 int size;
1886
1887 if (TREE_CODE (t) == TREE_VEC)
1888 size = (TREE_VEC_LENGTH (t)-1) * sizeof (tree) + sizeof (struct tree_vec);
1889 else
1890 my_friendly_abort (42);
1891
1892 #define ROUND(x) ((x + obstack_alignment_mask (obstack)) \
1893 & ~ obstack_alignment_mask (obstack))
1894 if ((char *)t + ROUND (size) == obstack_next_free (obstack))
1895 return 1;
1896 #undef ROUND
1897
1898 return 0;
1899 }
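/* Added note, not part of the original source: an obstack can only be
   trimmed from its free end, so can_free really asks "is T the most
   recent allocation?".  ROUND re-applies the obstack's alignment: with
   an alignment mask of 7 and a 20-byte TREE_VEC, ROUND (20) ==
   (20 + 7) & ~7 == 24, and T may be freed only if T + 24 is exactly the
   obstack's next-free pointer, i.e. nothing was allocated after it.  */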
1900
1901 /* Return first vector element whose BINFO_TYPE is ELEM.
1902 Return 0 if ELEM is not in VEC. VEC may be NULL_TREE. */
1903
1904 tree
1905 vec_binfo_member (elem, vec)
1906 tree elem, vec;
1907 {
1908 int i;
1909
1910 if (vec)
1911 for (i = 0; i < TREE_VEC_LENGTH (vec); ++i)
1912 if (elem == BINFO_TYPE (TREE_VEC_ELT (vec, i)))
1913 return TREE_VEC_ELT (vec, i);
1914
1915 return NULL_TREE;
1916 }
1917
1918 /* Kludge around the fact that DECL_CONTEXT for virtual functions returns
1919 the wrong thing for decl_function_context. Hopefully the uses in the
1920 backend won't matter, since we don't need a static chain for local class
1921 methods. FIXME! */
1922
1923 tree
1924 hack_decl_function_context (decl)
1925 tree decl;
1926 {
1927 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_FUNCTION_MEMBER_P (decl))
1928 return decl_function_context (TYPE_MAIN_DECL (DECL_CLASS_CONTEXT (decl)));
1929 return decl_function_context (decl);
1930 }
1931
1932 /* Return truthvalue of whether T1 is the same tree structure as T2.
1933 Return 1 if they are the same.
1934 Return 0 if they are understandably different.
1935 Return -1 if either contains tree structure not understood by
1936 this function. */
1937
1938 int
1939 cp_tree_equal (t1, t2)
1940 tree t1, t2;
1941 {
1942 register enum tree_code code1, code2;
1943 int cmp;
1944
1945 if (t1 == t2)
1946 return 1;
1947 if (t1 == 0 || t2 == 0)
1948 return 0;
1949
1950 code1 = TREE_CODE (t1);
1951 code2 = TREE_CODE (t2);
1952
1953 if (code1 == NOP_EXPR || code1 == CONVERT_EXPR || code1 == NON_LVALUE_EXPR)
1954 if (code2 == NOP_EXPR || code2 == CONVERT_EXPR || code2 == NON_LVALUE_EXPR)
1955 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
1956 else
1957 return cp_tree_equal (TREE_OPERAND (t1, 0), t2);
1958 else if (code2 == NOP_EXPR || code2 == CONVERT_EXPR
1959 || code2 == NON_LVALUE_EXPR)
1960 return cp_tree_equal (t1, TREE_OPERAND (t2, 0));
1961
1962 if (code1 != code2)
1963 return 0;
1964
1965 switch (code1)
1966 {
1967 case INTEGER_CST:
1968 return TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
1969 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2);
1970
1971 case REAL_CST:
1972 return REAL_VALUES_EQUAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
1973
1974 case STRING_CST:
1975 return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
1976 && !bcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
1977 TREE_STRING_LENGTH (t1));
1978
1979 case CONSTRUCTOR:
1980 abort ();
1981
1982 case SAVE_EXPR:
1983 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
1984
1985 case CALL_EXPR:
1986 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
1987 if (cmp <= 0)
1988 return cmp;
1989 return simple_cst_list_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
1990
1991 case TARGET_EXPR:
1992 /* Special case: if either target is an unallocated VAR_DECL,
1993 it means that it's going to be unified with whatever the
1994 TARGET_EXPR is really supposed to initialize, so treat it
1995 as being equivalent to anything. */
1996 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
1997 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
1998 && DECL_RTL (TREE_OPERAND (t1, 0)) == 0)
1999 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
2000 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
2001 && DECL_RTL (TREE_OPERAND (t2, 0)) == 0))
2002 cmp = 1;
2003 else
2004 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2005 if (cmp <= 0)
2006 return cmp;
2007 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2008
2009 case WITH_CLEANUP_EXPR:
2010 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2011 if (cmp <= 0)
2012 return cmp;
2013 return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t2, 2));
2014
2015 case COMPONENT_REF:
2016 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
2017 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2018 return 0;
2019
2020 case VAR_DECL:
2021 case PARM_DECL:
2022 case CONST_DECL:
2023 case FUNCTION_DECL:
2024 return 0;
2025
2026 case TEMPLATE_CONST_PARM:
2027 return TEMPLATE_CONST_IDX (t1) == TEMPLATE_CONST_IDX (t2);
2028
2029 case SIZEOF_EXPR:
2030 if (TREE_CODE (TREE_OPERAND (t1, 0)) != TREE_CODE (TREE_OPERAND (t2, 0)))
2031 return 0;
2032 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t1, 0))) == 't')
2033 return comptypes (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0), 1);
2034 break;
2035 }
2036
2037 switch (TREE_CODE_CLASS (code1))
2038 {
2039 int i;
2040 case '1':
2041 case '2':
2042 case '<':
2043 case 'e':
2044 case 'r':
2045 case 's':
2046 cmp = 1;
2047 for (i=0; i<tree_code_length[(int) code1]; ++i)
2048 {
2049 cmp = cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
2050 if (cmp <= 0)
2051 return cmp;
2052 }
2053 return cmp;
2054 }
2055
2056 return -1;
2057 }
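/* Added note, not part of the original source: callers treat the result
   of cp_tree_equal as a tri-state, along the lines of

       cmp = cp_tree_equal (t1, t2);
       if (cmp > 0)        ... known equal ...
       else if (cmp == 0)  ... known different ...
       else                ... contains nodes this function can't judge ...

   which is why the operand loops above return any value <= 0 as soon as
   it appears instead of continuing the comparison.  */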