tree.c (walk_tree): Don't recurse into DECL_INITIAL or DECL_SIZE unless...
[gcc.git] / gcc / cp / tree.c
1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "obstack.h"
25 #include "tree.h"
26 #include "cp-tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29 #include "toplev.h"
30 #include "ggc.h"
31 #include "insn-config.h"
32 #include "integrate.h"
33
34 static tree bot_manip PROTO((tree *, int *, void *));
35 static tree bot_replace PROTO((tree *, int *, void *));
36 static tree build_cplus_array_type_1 PROTO((tree, tree));
37 static void list_hash_add PROTO((int, tree));
38 static int list_hash PROTO((tree, tree, tree));
39 static tree list_hash_lookup PROTO((int, tree, tree, tree));
40 static void propagate_binfo_offsets PROTO((tree, tree));
41 static int avoid_overlap PROTO((tree, tree));
42 static cp_lvalue_kind lvalue_p_1 PROTO((tree, int));
43 static tree no_linkage_helper PROTO((tree *, int *, void *));
44 static tree build_srcloc PROTO((char *, int));
45 static void mark_list_hash PROTO ((void *));
46 static int statement_code_p PROTO((enum tree_code));
47 static tree mark_local_for_remap_r PROTO((tree *, int *, void *));
48 static tree cp_unsave_r PROTO ((tree *, int *, void *));
49 static void cp_unsave PROTO((tree *));
50 static tree build_target_expr PROTO((tree, tree));
51
52 #define CEIL(x,y) (((x) + (y) - 1) / (y))
53
54 /* If REF is an lvalue, returns the kind of lvalue that REF is.
55 Otherwise, returns clk_none. If TREAT_CLASS_RVALUES_AS_LVALUES is
56 non-zero, rvalues of class type are considered lvalues. */
57
58 static cp_lvalue_kind
59 lvalue_p_1 (ref, treat_class_rvalues_as_lvalues)
60 tree ref;
61 int treat_class_rvalues_as_lvalues;
62 {
63 cp_lvalue_kind op1_lvalue_kind = clk_none;
64 cp_lvalue_kind op2_lvalue_kind = clk_none;
65 
/* An expression of reference type is always an lvalue, whatever its form. */
66 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
67 return clk_ordinary;
68 
69 if (ref == current_class_ptr && flag_this_is_variable <= 0)
70 return clk_none;
71 
72 switch (TREE_CODE (ref))
73 {
74 /* preincrements and predecrements are valid lvals, provided
75 what they refer to are valid lvals. */
76 case PREINCREMENT_EXPR:
77 case PREDECREMENT_EXPR:
78 case SAVE_EXPR:
79 case UNSAVE_EXPR:
80 case TRY_CATCH_EXPR:
81 case WITH_CLEANUP_EXPR:
82 case REALPART_EXPR:
83 case IMAGPART_EXPR:
84 case NOP_EXPR:
85 return lvalue_p_1 (TREE_OPERAND (ref, 0),
86 treat_class_rvalues_as_lvalues);
87 
88 case COMPONENT_REF:
89 op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
90 treat_class_rvalues_as_lvalues);
91 if (op1_lvalue_kind
92 /* The "field" can be a FUNCTION_DECL or an OVERLOAD in some
93 situations. */
94 && TREE_CODE (TREE_OPERAND (ref, 1)) == FIELD_DECL
95 && DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1)))
96 {
97 /* Clear the ordinary bit. If this object was a class
98 rvalue we want to preserve that information. */
99 op1_lvalue_kind &= ~clk_ordinary;
100 /* The lvalue is for a bit-field. */
101 op1_lvalue_kind |= clk_bitfield;
102 }
103 return op1_lvalue_kind;
104 
105 case STRING_CST:
106 return clk_ordinary;
107 
108 case VAR_DECL:
/* An in-class static data member constant is not an lvalue. */
109 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
110 && DECL_LANG_SPECIFIC (ref)
111 && DECL_IN_AGGR_P (ref))
112 return clk_none;
/* Fall through. */
113 case INDIRECT_REF:
114 case ARRAY_REF:
115 case PARM_DECL:
116 case RESULT_DECL:
117 if (TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
118 return clk_ordinary;
119 break;
120 
121 /* A currently unresolved scope ref. */
122 case SCOPE_REF:
123 my_friendly_abort (103);
/* Fall through (not reached; my_friendly_abort does not return). */
124 case OFFSET_REF:
125 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
126 return clk_ordinary;
127 /* Fall through. */
128 case MAX_EXPR:
129 case MIN_EXPR:
130 op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
131 treat_class_rvalues_as_lvalues);
132 op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
133 treat_class_rvalues_as_lvalues);
134 break;
135 
136 case COND_EXPR:
/* A ?: expression is an lvalue only if both arms are. */
137 op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
138 treat_class_rvalues_as_lvalues);
139 op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 2),
140 treat_class_rvalues_as_lvalues);
141 break;
142 
143 case MODIFY_EXPR:
144 return clk_ordinary;
145 
146 case COMPOUND_EXPR:
/* The lvalue-ness of a comma expression comes from its second operand. */
147 return lvalue_p_1 (TREE_OPERAND (ref, 1),
148 treat_class_rvalues_as_lvalues);
149 
150 case TARGET_EXPR:
151 return treat_class_rvalues_as_lvalues ? clk_class : clk_none;
152 
153 case CALL_EXPR:
154 case VA_ARG_EXPR:
155 return ((treat_class_rvalues_as_lvalues
156 && IS_AGGR_TYPE (TREE_TYPE (ref)))
157 ? clk_class : clk_none);
158 
159 case FUNCTION_DECL:
160 /* All functions (except non-static-member functions) are
161 lvalues. */
162 return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref)
163 ? clk_none : clk_ordinary);
164 
165 default:
166 break;
167 }
168 
169 /* If one operand is not an lvalue at all, then this expression is
170 not an lvalue. */
171 if (!op1_lvalue_kind || !op2_lvalue_kind)
172 return clk_none;
173 
174 /* Otherwise, it's an lvalue, and it has all the odd properties
175 contributed by either operand. */
176 op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind;
177 /* It's not an ordinary lvalue if it involves either a bit-field or
178 a class rvalue. */
179 if ((op1_lvalue_kind & ~clk_ordinary) != clk_none)
180 op1_lvalue_kind &= ~clk_ordinary;
181 return op1_lvalue_kind;
182 }
183
184 /* If REF is an lvalue, returns the kind of lvalue that REF is.
185 Otherwise, returns clk_none. Lvalues can be assigned, unless they
186 have TREE_READONLY, or unless they are FUNCTION_DECLs. Lvalues can
187 have their address taken, unless they have DECL_REGISTER. */
188
189 cp_lvalue_kind
190 real_lvalue_p (ref)
191 tree ref;
192 {
193 return lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/0);
194 }
195
196 /* This differs from real_lvalue_p in that class rvalues are
197 considered lvalues. */
198
199 int
200 lvalue_p (ref)
201 tree ref;
202 {
203 return
204 (lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/1) != clk_none);
205 }
206
207 /* Return nonzero if REF is an lvalue valid for this language;
208 otherwise, print an error message and return zero. */
209
210 int
211 lvalue_or_else (ref, string)
212 tree ref;
213 const char *string;
214 {
215 int win = lvalue_p (ref);
216 if (! win)
217 error ("non-lvalue in %s", string);
218 return win;
219 }
220
221 /* Build a TARGET_EXPR, initializing the DECL with the VALUE. */
222
223 static tree
224 build_target_expr (decl, value)
225 tree decl;
226 tree value;
227 {
228 tree t;
229
230 t = build (TARGET_EXPR, TREE_TYPE (decl), decl, value,
231 maybe_build_cleanup (decl), NULL_TREE);
232 /* We always set TREE_SIDE_EFFECTS so that expand_expr does not
233 ignore the TARGET_EXPR. If there really turn out to be no
234 side-effects, then the optimizer should be able to get rid of
235 whatever code is generated anyhow. */
236 TREE_SIDE_EFFECTS (t) = 1;
237
238 return t;
239 }
240
241 /* INIT is a CALL_EXPR which needs info about its target.
242 TYPE is the type that this initialization should appear to have.
243
244 Build an encapsulation of the initialization to perform
245 and return it so that it can be processed by language-independent
246 and language-specific expression expanders. */
247
248 tree
249 build_cplus_new (type, init)
250 tree type;
251 tree init;
252 {
253 tree fn;
254 tree slot;
255 tree rval;
256 
257 /* Make sure that we're not trying to create an instance of an
258 abstract class. */
259 abstract_virtuals_error (NULL_TREE, type);
260 
/* Only calls need the TARGET_EXPR treatment; anything else is just
converted to TYPE. */
261 if (TREE_CODE (init) != CALL_EXPR && TREE_CODE (init) != AGGR_INIT_EXPR)
262 return convert (type, init);
263 
/* Create the artificial temporary that the TARGET_EXPR will initialize. */
264 slot = build (VAR_DECL, type);
265 DECL_ARTIFICIAL (slot) = 1;
266 DECL_CONTEXT (slot) = current_function_decl;
267 layout_decl (slot, 0);
268 
269 /* We split the CALL_EXPR into its function and its arguments here.
270 Then, in expand_expr, we put them back together. The reason for
271 this is that this expression might be a default argument
272 expression. In that case, we need a new temporary every time the
273 expression is used. That's what break_out_target_exprs does; it
274 replaces every AGGR_INIT_EXPR with a copy that uses a fresh
275 temporary slot. Then, expand_expr builds up a call-expression
276 using the new slot. */
277 fn = TREE_OPERAND (init, 0);
278 rval = build (AGGR_INIT_EXPR, type, fn, TREE_OPERAND (init, 1), slot);
279 TREE_SIDE_EFFECTS (rval) = 1;
/* Record whether INIT was a direct call to a constructor. */
280 AGGR_INIT_VIA_CTOR_P (rval)
281 = (TREE_CODE (fn) == ADDR_EXPR
282 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
283 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0)));
284 rval = build_target_expr (slot, rval);
285 
286 return rval;
287 }
288
289 /* Build a TARGET_EXPR using INIT to initialize a new temporary of the
290 indicated TYPE. */
291
292 tree
293 build_target_expr_with_type (init, type)
294 tree init;
295 tree type;
296 {
297 tree slot;
298 tree rval;
299
300 slot = build (VAR_DECL, type);
301 DECL_ARTIFICIAL (slot) = 1;
302 DECL_CONTEXT (slot) = current_function_decl;
303 layout_decl (slot, 0);
304 rval = build_target_expr (slot, init);
305
306 return rval;
307 }
308
309 /* Like build_target_expr_with_type, but use the type of INIT. */
310
311 tree
312 get_target_expr (init)
313 tree init;
314 {
315 return build_target_expr_with_type (init, TREE_TYPE (init));
316 }
317
318 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
319 these CALL_EXPRs with tree nodes that will perform the cleanups. */
320
321 tree
322 break_out_cleanups (exp)
323 tree exp;
324 {
325 tree tmp = exp;
326 
/* If EXP itself is a call returning something needing destruction,
wrap it directly. */
327 if (TREE_CODE (tmp) == CALL_EXPR
328 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
329 return build_cplus_new (TREE_TYPE (tmp), tmp);
330 
/* Otherwise look through value-preserving conversions for such a call. */
331 while (TREE_CODE (tmp) == NOP_EXPR
332 || TREE_CODE (tmp) == CONVERT_EXPR
333 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
334 {
335 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
336 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
337 {
/* Wrap the call in place inside the conversion, then stop. */
338 TREE_OPERAND (tmp, 0)
339 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
340 TREE_OPERAND (tmp, 0));
341 break;
342 }
343 else
344 tmp = TREE_OPERAND (tmp, 0);
345 }
346 return exp;
347 }
348
349 /* Recursively perform a preorder search EXP for CALL_EXPRs, making
350 copies where they are found. Returns a deep copy of all nodes transitively
351 containing CALL_EXPRs. */
352
353 tree
354 break_out_calls (exp)
355 tree exp;
356 {
357 register tree t1, t2 = NULL_TREE;
358 register enum tree_code code;
359 register int changed = 0;
360 register int i;
361 
362 if (exp == NULL_TREE)
363 return exp;
364 
365 code = TREE_CODE (exp);
366 
367 if (code == CALL_EXPR)
368 return copy_node (exp);
369 
370 /* Don't try and defeat a save_expr, as it should only be done once. */
371 if (code == SAVE_EXPR)
372 return exp;
373 
374 switch (TREE_CODE_CLASS (code))
375 {
376 default:
377 abort ();
378 
379 case 'c': /* a constant */
380 case 't': /* a type node */
381 case 'x': /* something random, like an identifier or an ERROR_MARK. */
382 return exp;
383 
384 case 'd': /* A decl node */
385 #if 0 /* This is bogus. jason 9/21/94 */
386 
387 t1 = break_out_calls (DECL_INITIAL (exp));
388 if (t1 != DECL_INITIAL (exp))
389 {
390 exp = copy_node (exp);
391 DECL_INITIAL (exp) = t1;
392 }
393 #endif
394 return exp;
395 
396 case 'b': /* A block node */
397 {
398 /* Don't know how to handle these correctly yet. Must do a
399 break_out_calls on all DECL_INITIAL values for local variables,
400 and also break_out_calls on all sub-blocks and sub-statements. */
401 abort ();
402 }
403 return exp;
404 
405 case 'e': /* an expression */
406 case 'r': /* a reference */
407 case 's': /* an expression with side effects */
/* Recurse on every operand, copying EXP lazily on the first change. */
408 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
409 {
410 t1 = break_out_calls (TREE_OPERAND (exp, i));
411 if (t1 != TREE_OPERAND (exp, i))
412 {
413 exp = copy_node (exp);
414 TREE_OPERAND (exp, i) = t1;
415 }
416 }
417 return exp;
418 
419 case '<': /* a comparison expression */
420 case '2': /* a binary arithmetic expression */
421 t2 = break_out_calls (TREE_OPERAND (exp, 1));
422 if (t2 != TREE_OPERAND (exp, 1))
423 changed = 1;
/* Fall through to handle operand 0 for both unary and binary codes. */
424 case '1': /* a unary arithmetic expression */
425 t1 = break_out_calls (TREE_OPERAND (exp, 0));
426 if (t1 != TREE_OPERAND (exp, 0))
427 changed = 1;
428 if (changed)
429 {
430 if (tree_code_length[(int) code] == 1)
431 return build1 (code, TREE_TYPE (exp), t1);
432 else
433 return build (code, TREE_TYPE (exp), t1, t2);
434 }
435 return exp;
436 }
437 
438 }
439 \f
440 extern struct obstack permanent_obstack;
441
442 /* Here is how primitive or already-canonicalized types' hash
443 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
444 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
445
446 /* Construct, lay out and return the type of methods belonging to class
447 BASETYPE and whose arguments are described by ARGTYPES and whose values
448 are described by RETTYPE. If each type exists already, reuse it. */
449
450 tree
451 build_cplus_method_type (basetype, rettype, argtypes)
452 tree basetype, rettype, argtypes;
453 {
454 register tree t;
455 tree ptype;
456 int hashcode;
457 
458 /* Make a node of the sort we want. */
459 t = make_node (METHOD_TYPE);
460 
461 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
462 TREE_TYPE (t) = rettype;
463 ptype = build_pointer_type (basetype);
464 
465 /* The actual arglist for this function includes a "hidden" argument
466 which is "this". Put it into the list of argument types. Make
467 sure that the new argument list is allocated on the same obstack
468 as the type. */
469 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
470 TYPE_ARG_TYPES (t) = argtypes;
471 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
472 
473 /* If we already have such a type, use the old one and free this one.
474 Note that it also frees up the above cons cell if found. */
475 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) +
476 type_hash_list (argtypes);
477 
478 t = type_hash_canon (hashcode, t);
479 
/* Lay the type out if this canonical node has not been laid out yet. */
480 if (TYPE_SIZE (t) == 0)
481 layout_type (t);
482 
483 return t;
484 }
485
/* Worker for build_cplus_array_type: build an array of ELT_TYPE with
domain INDEX_TYPE, deferring to build_array_type outside templates. */
486 static tree
487 build_cplus_array_type_1 (elt_type, index_type)
488 tree elt_type;
489 tree index_type;
490 {
491 tree t;
492 
493 if (elt_type == error_mark_node || index_type == error_mark_node)
494 return error_mark_node;
495 
/* For template-dependent types, build a bare ARRAY_TYPE node by hand
rather than calling build_array_type. */
496 if (processing_template_decl
497 || uses_template_parms (elt_type)
498 || uses_template_parms (index_type))
499 {
500 t = make_node (ARRAY_TYPE);
501 TREE_TYPE (t) = elt_type;
502 TYPE_DOMAIN (t) = index_type;
503 }
504 else
505 t = build_array_type (elt_type, index_type);
506 
507 /* Push these needs up so that initialization takes place
508 more easily. */
509 TYPE_NEEDS_CONSTRUCTING (t)
510 = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
511 TYPE_NEEDS_DESTRUCTOR (t)
512 = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
513 return t;
514 }
515
516 tree
517 build_cplus_array_type (elt_type, index_type)
518 tree elt_type;
519 tree index_type;
520 {
521 tree t;
522 int type_quals = CP_TYPE_QUALS (elt_type);
523
524 elt_type = TYPE_MAIN_VARIANT (elt_type);
525
526 t = build_cplus_array_type_1 (elt_type, index_type);
527
528 if (type_quals != TYPE_UNQUALIFIED)
529 t = cp_build_qualified_type (t, type_quals);
530
531 return t;
532 }
533 \f
534 /* Make a variant of TYPE, qualified with the TYPE_QUALS. Handles
535 arrays correctly. In particular, if TYPE is an array of T's, and
536 TYPE_QUALS is non-empty, returns an array of qualified T's. If
537 an attempt is made to qualify a type illegally, and COMPLAIN is
538 non-zero, an error is issued. If COMPLAIN is zero, error_mark_node
539 is returned. */
540
541 tree
542 cp_build_qualified_type_real (type, type_quals, complain)
543 tree type;
544 int type_quals;
545 int complain;
546 {
547 tree result;
548 
549 if (type == error_mark_node)
550 return type;
551 
/* Nothing to do if TYPE already carries exactly these qualifiers. */
552 if (type_quals == TYPE_QUALS (type))
553 return type;
554 
555 /* A restrict-qualified pointer type must be a pointer (or reference)
556 to object or incomplete type. */
557 if ((type_quals & TYPE_QUAL_RESTRICT)
558 && TREE_CODE (type) != TEMPLATE_TYPE_PARM
559 && (!POINTER_TYPE_P (type)
560 || TYPE_PTRMEM_P (type)
561 || TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE))
562 {
563 if (complain)
564 cp_error ("`%T' cannot be `restrict'-qualified", type);
565 else
566 return error_mark_node;
567 
/* After complaining, drop the bogus qualifier and carry on. */
568 type_quals &= ~TYPE_QUAL_RESTRICT;
569 }
570 
571 if (type_quals != TYPE_UNQUALIFIED
572 && TREE_CODE (type) == FUNCTION_TYPE)
573 {
574 if (complain)
575 cp_error ("`%T' cannot be `const'-, `volatile'-, or `restrict'-qualified", type);
576 else
577 return error_mark_node;
578 type_quals = TYPE_UNQUALIFIED;
579 }
580 else if (TREE_CODE (type) == ARRAY_TYPE)
581 {
582 /* In C++, the qualification really applies to the array element
583 type. Obtain the appropriately qualified element type. */
584 tree t;
585 tree element_type
586 = cp_build_qualified_type_real (TREE_TYPE (type),
587 type_quals,
588 complain);
589 
590 if (element_type == error_mark_node)
591 return error_mark_node;
592 
593 /* See if we already have an identically qualified type. */
594 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
595 if (CP_TYPE_QUALS (t) == type_quals)
596 break;
597 
598 /* If we didn't already have it, create it now. */
599 if (!t)
600 {
601 /* Make a new array type, just like the old one, but with the
602 appropriately qualified element type. */
603 t = build_type_copy (type);
604 TREE_TYPE (t) = element_type;
605 }
606 
607 /* Even if we already had this variant, we update
608 TYPE_NEEDS_CONSTRUCTING and TYPE_NEEDS_DESTRUCTOR in case
609 they changed since the variant was originally created.
610 
611 This seems hokey; if there is some way to use a previous
612 variant *without* coming through here,
613 TYPE_NEEDS_CONSTRUCTING will never be updated. */
614 TYPE_NEEDS_CONSTRUCTING (t)
615 = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type));
616 TYPE_NEEDS_DESTRUCTOR (t)
617 = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type));
618 return t;
619 }
620 else if (TYPE_PTRMEMFUNC_P (type))
621 {
622 /* For a pointer-to-member type, we can't just return a
623 cv-qualified version of the RECORD_TYPE. If we do, we
624 haven't changed the field that contains the actual pointer to
625 a method, and so TYPE_PTRMEMFUNC_FN_TYPE will be wrong. */
626 tree t;
627 
628 t = TYPE_PTRMEMFUNC_FN_TYPE (type);
629 t = cp_build_qualified_type_real (t, type_quals, complain);
630 return build_ptrmemfunc_type (t);
631 }
632 
633 /* Retrieve (or create) the appropriately qualified variant. */
634 result = build_qualified_type (type, type_quals);
635 
636 /* If this was a pointer-to-method type, and we just made a copy,
637 then we need to clear the cached associated
638 pointer-to-member-function type; it is not valid for the new
639 type. */
640 if (result != type
641 && TREE_CODE (type) == POINTER_TYPE
642 && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)
643 TYPE_SET_PTRMEMFUNC_TYPE (result, NULL_TREE);
644 
645 return result;
646 }
647
648 /* Returns the canonical version of TYPE. In other words, if TYPE is
649 a typedef, returns the underlying type. The cv-qualification of
650 the type returned matches the type input; they will always be
651 compatible types. */
652
653 tree
654 canonical_type_variant (t)
655 tree t;
656 {
657 return cp_build_qualified_type (TYPE_MAIN_VARIANT (t), CP_TYPE_QUALS (t));
658 }
659 \f
660 /* Add OFFSET to all base types of T.
661
662 OFFSET, which is a type offset, is number of bytes.
663
664 Note that we don't have to worry about having two paths to the
665 same base type, since this type owns its association list. */
666
667 static void
668 propagate_binfo_offsets (binfo, offset)
669 tree binfo;
670 tree offset;
671 {
672 tree binfos = BINFO_BASETYPES (binfo);
673 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
674 
675 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
676 {
677 tree base_binfo = TREE_VEC_ELT (binfos, i);
678 
/* Skip virtual bases; only non-virtual bases are adjusted here. */
679 if (TREE_VIA_VIRTUAL (base_binfo))
680 i += 1;
681 else
682 {
683 int j;
684 tree delta = NULL_TREE;
685 
/* Find the next non-virtual base, to compute the distance to it. */
686 for (j = i+1; j < n_baselinks; j++)
687 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
688 {
689 /* The next basetype offset must take into account the space
690 between the classes, not just the size of each class. */
691 delta = size_binop (MINUS_EXPR,
692 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
693 BINFO_OFFSET (base_binfo));
694 break;
695 }
696 
697 #if 0
698 if (BINFO_OFFSET_ZEROP (base_binfo))
699 BINFO_OFFSET (base_binfo) = offset;
700 else
701 BINFO_OFFSET (base_binfo)
702 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
703 #else
704 BINFO_OFFSET (base_binfo) = offset;
705 #endif
706 
707 propagate_binfo_offsets (base_binfo, offset);
708 
709 /* Go to our next class that counts for offset propagation. */
710 i = j;
711 if (i < n_baselinks)
712 offset = size_binop (PLUS_EXPR, offset, delta);
713 }
714 }
715 }
716
717 /* Makes new binfos for the indirect bases under BINFO, and updates
718 BINFO_OFFSET for them and their bases. */
719
720 void
721 unshare_base_binfos (binfo)
722 tree binfo;
723 {
724 tree binfos = BINFO_BASETYPES (binfo);
725 tree new_binfo;
726 int j;
727 
/* Nothing to do for a class with no bases. */
728 if (binfos == NULL_TREE)
729 return;
730 
731 /* Now unshare the structure beneath BINFO. */
/* Replace each base BINFO with a fresh copy carrying the same offset,
vtable, virtuals and access/virtuality flags, then recurse into it. */
732 for (j = TREE_VEC_LENGTH (binfos)-1;
733 j >= 0; j--)
734 {
735 tree base_binfo = TREE_VEC_ELT (binfos, j);
736 new_binfo = TREE_VEC_ELT (binfos, j)
737 = make_binfo (BINFO_OFFSET (base_binfo),
738 base_binfo,
739 BINFO_VTABLE (base_binfo),
740 BINFO_VIRTUALS (base_binfo));
741 TREE_VIA_PUBLIC (new_binfo) = TREE_VIA_PUBLIC (base_binfo);
742 TREE_VIA_PROTECTED (new_binfo) = TREE_VIA_PROTECTED (base_binfo);
743 TREE_VIA_VIRTUAL (new_binfo) = TREE_VIA_VIRTUAL (base_binfo);
744 BINFO_INHERITANCE_CHAIN (new_binfo) = binfo;
745 unshare_base_binfos (new_binfo);
746 }
747 }
748
749 /* Finish the work of layout_record, now taking virtual bases into account.
750 Also compute the actual offsets that our base classes will have.
751 This must be performed after the fields are laid out, since virtual
752 baseclasses must lay down at the end of the record.
753
754 Returns the maximum number of virtual functions any of the
755 baseclasses provide. */
756
757 int
758 layout_basetypes (rec, max)
759 tree rec;
760 int max;
761 {
762 tree binfos = TYPE_BINFO_BASETYPES (rec);
763 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
764 
765 tree vbase_types;
766 
767 unsigned int record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
768 unsigned int desired_align;
769 
770 /* Record size so far is CONST_SIZE bits, where CONST_SIZE is an integer. */
771 register unsigned int const_size = 0;
772 unsigned int nonvirtual_const_size;
773 
774 #ifdef STRUCTURE_SIZE_BOUNDARY
775 /* Packed structures don't need to have minimum size. */
776 if (! TYPE_PACKED (rec))
777 record_align = MAX (record_align, STRUCTURE_SIZE_BOUNDARY);
778 #endif
779 
780 /* Get all the virtual base types that this type uses. The
781 TREE_VALUE slot holds the virtual baseclass type. Note that
782 get_vbase_types makes copies of the virtual base BINFOs, so that
783 the vbase_types are unshared. */
784 vbase_types = CLASSTYPE_VBASECLASSES (rec);
785 
786 my_friendly_assert (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST, 19970302);
787 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
788 
789 nonvirtual_const_size = const_size;
790 
/* Place each virtual base at the end of the record, in turn. */
791 while (vbase_types)
792 {
793 tree basetype = BINFO_TYPE (vbase_types);
794 tree offset;
795 
796 desired_align = TYPE_ALIGN (basetype);
797 record_align = MAX (record_align, desired_align);
798 
799 if (const_size == 0)
800 offset = integer_zero_node;
801 else
802 {
803 /* Give each virtual base type the alignment it wants. */
804 const_size = CEIL (const_size, desired_align) * desired_align;
805 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
806 }
807 
808 if (CLASSTYPE_VSIZE (basetype) > max)
809 max = CLASSTYPE_VSIZE (basetype);
810 BINFO_OFFSET (vbase_types) = offset;
811 
812 /* Every virtual baseclass takes at least a UNIT, so that we can
813 take its address and get something different for each base. */
814 const_size += MAX (BITS_PER_UNIT,
815 TREE_INT_CST_LOW (CLASSTYPE_SIZE (basetype)));
816 
817 vbase_types = TREE_CHAIN (vbase_types);
818 }
819 
820 if (const_size)
821 {
822 /* Because a virtual base might take a single byte above,
823 we have to re-adjust the total size to make sure it is
824 a multiple of the alignment. */
825 /* Give the whole object the alignment it wants. */
826 const_size = CEIL (const_size, record_align) * record_align;
827 }
828 
829 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
830 here, as that is for this class, without any virtual base classes. */
831 TYPE_ALIGN (rec) = record_align;
832 if (const_size != nonvirtual_const_size)
833 {
834 TYPE_SIZE (rec) = size_int (const_size);
835 TYPE_SIZE_UNIT (rec) = size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (rec),
836 size_int (BITS_PER_UNIT));
837 }
838 
839 /* Now propagate offset information throughout the lattice. */
840 for (i = 0; i < n_baseclasses; i++)
841 {
842 register tree base_binfo = TREE_VEC_ELT (binfos, i);
843 register tree basetype = BINFO_TYPE (base_binfo);
844 tree field = TYPE_FIELDS (rec);
845 
846 if (TREE_VIA_VIRTUAL (base_binfo))
847 continue;
848 
849 my_friendly_assert (TREE_TYPE (field) == basetype, 23897);
850 
851 if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
852 cp_warning ("direct base `%T' inaccessible in `%T' due to ambiguity",
853 basetype, rec);
854 
855 BINFO_OFFSET (base_binfo)
856 = size_int (CEIL (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field)),
857 BITS_PER_UNIT));
858 propagate_binfo_offsets (base_binfo, BINFO_OFFSET (base_binfo));
/* Remove the stand-in base field built by build_base_fields; it was
only needed during layout. */
859 TYPE_FIELDS (rec) = TREE_CHAIN (field);
860 }
861 
862 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
863 vbase_types = TREE_CHAIN (vbase_types))
864 {
865 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
866 unshare_base_binfos (vbase_types);
867 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
868 
869 if (extra_warnings)
870 {
871 tree basetype = BINFO_TYPE (vbase_types);
872 if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
873 cp_warning ("virtual base `%T' inaccessible in `%T' due to ambiguity",
874 basetype, rec);
875 }
876 }
877 
878 return max;
879 }
880
881 /* If the empty base field in DECL overlaps with a base of the same type in
882 NEWDECL, which is either another base field or the first data field of
883 the class, pad the base just before NEWDECL and return 1. Otherwise,
884 return 0. */
885
886 static int
887 avoid_overlap (decl, newdecl)
888 tree decl, newdecl;
889 {
890 tree field;
891
892 if (newdecl == NULL_TREE
893 || ! types_overlap_p (TREE_TYPE (decl), TREE_TYPE (newdecl)))
894 return 0;
895
896 for (field = decl; TREE_CHAIN (field) && TREE_CHAIN (field) != newdecl;
897 field = TREE_CHAIN (field))
898 ;
899
900 DECL_SIZE (field) = integer_one_node;
901
902 return 1;
903 }
904
905 /* Returns a list of fields to stand in for the base class subobjects
906 of REC. These fields are later removed by layout_basetypes. */
907
908 tree
909 build_base_fields (rec)
910 tree rec;
911 {
912 /* Chain to hold all the new FIELD_DECLs which stand in for base class
913 subobjects. */
914 tree base_decls = NULL_TREE;
915 tree binfos = TYPE_BINFO_BASETYPES (rec);
916 int n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
917 tree decl, nextdecl;
918 int i, saw_empty = 0;
/* Running maximum alignment; used only in the !flag_new_abi padding. */
919 unsigned int base_align = 0;
920 
921 for (i = 0; i < n_baseclasses; ++i)
922 {
923 register tree base_binfo = TREE_VEC_ELT (binfos, i);
924 register tree basetype = BINFO_TYPE (base_binfo);
925 
926 if (TYPE_SIZE (basetype) == 0)
927 /* This error is now reported in xref_tag, thus giving better
928 location information. */
929 continue;
930 
/* Virtual bases are laid out later, by layout_basetypes. */
931 if (TREE_VIA_VIRTUAL (base_binfo))
932 continue;
933 
934 decl = build_lang_decl (FIELD_DECL, NULL_TREE, basetype);
935 DECL_ARTIFICIAL (decl) = 1;
936 DECL_FIELD_CONTEXT (decl) = DECL_CLASS_CONTEXT (decl) = rec;
937 DECL_SIZE (decl) = CLASSTYPE_SIZE (basetype);
938 DECL_ALIGN (decl) = CLASSTYPE_ALIGN (basetype);
939 TREE_CHAIN (decl) = base_decls;
940 base_decls = decl;
941 
942 if (! flag_new_abi)
943 {
944 /* Brain damage for backwards compatibility. For no good reason,
945 the old layout_basetypes made every base at least as large as
946 the alignment for the bases up to that point, gratuitously
947 wasting space. So we do the same thing here. */
948 base_align = MAX (base_align, DECL_ALIGN (decl));
949 DECL_SIZE (decl)
950 = size_int (MAX (TREE_INT_CST_LOW (DECL_SIZE (decl)),
951 (int) base_align));
952 }
953 else if (DECL_SIZE (decl) == integer_zero_node)
954 saw_empty = 1;
955 }
956 
957 /* Reverse the list of fields so we allocate the bases in the proper
958 order. */
959 base_decls = nreverse (base_decls);
960 
961 /* In the presence of empty base classes, we run the risk of allocating
962 two objects of the same class on top of one another. Avoid that. */
963 if (flag_new_abi && saw_empty)
964 for (decl = base_decls; decl; decl = TREE_CHAIN (decl))
965 {
966 if (DECL_SIZE (decl) == integer_zero_node)
967 {
968 /* First step through the following bases until we find
969 an overlap or a non-empty base. */
970 for (nextdecl = TREE_CHAIN (decl); nextdecl;
971 nextdecl = TREE_CHAIN (nextdecl))
972 {
973 if (avoid_overlap (decl, nextdecl)
974 || DECL_SIZE (nextdecl) != integer_zero_node)
975 goto nextbase;
976 }
977 
978 /* If we're still looking, also check against the first
979 field. */
980 for (nextdecl = TYPE_FIELDS (rec);
981 nextdecl && TREE_CODE (nextdecl) != FIELD_DECL;
982 nextdecl = TREE_CHAIN (nextdecl))
983 /* keep looking */;
984 avoid_overlap (decl, nextdecl);
985 }
986 nextbase:;
987 }
988 
989 return base_decls;
990 }
991
992 /* Returns list of virtual base class pointers in a FIELD_DECL chain. */
993
994 tree
995 build_vbase_pointer_fields (rec)
996 tree rec;
997 {
998 /* Chain to hold all the new FIELD_DECLs which point at virtual
999 base classes. */
1000 tree vbase_decls = NULL_TREE;
1001 tree binfos = TYPE_BINFO_BASETYPES (rec);
1002 int n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
1003 tree decl;
1004 int i;
1005 
1006 /* Handle basetypes almost like fields, but record their
1007 offsets differently. */
1008 
1009 for (i = 0; i < n_baseclasses; i++)
1010 {
1011 register tree base_binfo = TREE_VEC_ELT (binfos, i);
1012 register tree basetype = BINFO_TYPE (base_binfo);
1013 
1014 if (TYPE_SIZE (basetype) == 0)
1015 /* This error is now reported in xref_tag, thus giving better
1016 location information. */
1017 continue;
1018 
1019 /* All basetypes are recorded in the association list of the
1020 derived type. */
1021 
1022 if (TREE_VIA_VIRTUAL (base_binfo))
1023 {
1024 int j;
1025 const char *name;
1026 
1027 /* The offset for a virtual base class is only used in computing
1028 virtual function tables and for initializing virtual base
1029 pointers. It is built once `get_vbase_types' is called. */
1030 
1031 /* If this basetype can come from another vbase pointer
1032 without an additional indirection, we will share
1033 that pointer. If an indirection is involved, we
1034 make our own pointer. */
1035 for (j = 0; j < n_baseclasses; j++)
1036 {
1037 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
1038 if (! TREE_VIA_VIRTUAL (other_base_binfo)
1039 && binfo_member (basetype,
1040 CLASSTYPE_VBASECLASSES (BINFO_TYPE
1041 (other_base_binfo))
1042 ))
1043 goto got_it;
1044 }
/* No sharable pointer was found: make our own vbase pointer field. */
1045 FORMAT_VBASE_NAME (name, basetype);
1046 decl = build_lang_decl (FIELD_DECL, get_identifier (name),
1047 build_pointer_type (basetype));
1048 /* If you change any of the below, take a look at all the
1049 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
1050 them too. */
1051 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
1052 DECL_VIRTUAL_P (decl) = 1;
1053 DECL_ARTIFICIAL (decl) = 1;
1054 DECL_FIELD_CONTEXT (decl) = rec;
1055 DECL_CLASS_CONTEXT (decl) = rec;
1056 DECL_FCONTEXT (decl) = basetype;
1057 DECL_SAVED_INSNS (decl) = 0;
1058 DECL_FIELD_SIZE (decl) = 0;
1059 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
1060 TREE_CHAIN (decl) = vbase_decls;
1061 BINFO_VPTR_FIELD (base_binfo) = decl;
1062 vbase_decls = decl;
1063 
1064 got_it:
1065 /* The space this decl occupies has already been accounted for. */
1066 ;
1067 }
1068 }
1069 
1070 return vbase_decls;
1071 }
1072 \f
/* Hashing of lists so that we don't make duplicates.
   The main entry point is `hash_tree_cons'.  */

/* Each hash table slot is a bucket containing a chain
   of these structures.  */

struct list_hash
{
  struct list_hash *next;	/* Next structure in the bucket.  */
  int hashcode;			/* Hash code of this list.  */
  tree list;			/* The list recorded here.  */
};

/* Now here is the hash table.  When recording a list, it is added
   to the slot whose index is the hash code mod the table size.
   Note that the hash table is used for several kinds of lists.
   While all these live in the same table, they are completely independent,
   and the hash code is computed differently for each of these.  */

#define TYPE_HASH_SIZE 59
static struct list_hash *list_hash_table[TYPE_HASH_SIZE];
1094
1095 /* Compute a hash code for a list (chain of TREE_LIST nodes
1096 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
1097 TREE_COMMON slots), by adding the hash codes of the individual entries. */
1098
1099 static int
1100 list_hash (purpose, value, chain)
1101 tree purpose, value, chain;
1102 {
1103 register int hashcode = 0;
1104
1105 if (chain)
1106 hashcode += TYPE_HASH (chain);
1107
1108 if (value)
1109 hashcode += TYPE_HASH (value);
1110 else
1111 hashcode += 1007;
1112 if (purpose)
1113 hashcode += TYPE_HASH (purpose);
1114 else
1115 hashcode += 1009;
1116 return hashcode;
1117 }
1118
1119 /* Look in the type hash table for a type isomorphic to TYPE.
1120 If one is found, return it. Otherwise return 0. */
1121
1122 static tree
1123 list_hash_lookup (hashcode, purpose, value, chain)
1124 int hashcode;
1125 tree purpose, value, chain;
1126 {
1127 register struct list_hash *h;
1128
1129 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1130 if (h->hashcode == hashcode
1131 && TREE_PURPOSE (h->list) == purpose
1132 && TREE_VALUE (h->list) == value
1133 && TREE_CHAIN (h->list) == chain)
1134 return h->list;
1135 return 0;
1136 }
1137
1138 /* Add an entry to the list-hash-table
1139 for a list TYPE whose hash code is HASHCODE. */
1140
1141 static void
1142 list_hash_add (hashcode, list)
1143 int hashcode;
1144 tree list;
1145 {
1146 register struct list_hash *h;
1147
1148 h = (struct list_hash *) obstack_alloc (&permanent_obstack, sizeof (struct list_hash));
1149 h->hashcode = hashcode;
1150 h->list = list;
1151 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
1152 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
1153 }
1154
1155 /* Given list components PURPOSE, VALUE, AND CHAIN, return the canonical
1156 object for an identical list if one already exists. Otherwise, build a
1157 new one, and record it as the canonical object. */
1158
1159 /* Set to 1 to debug without canonicalization. Never set by program. */
1160
1161 static int debug_no_list_hash = 0;
1162
1163 tree
1164 hash_tree_cons (purpose, value, chain)
1165 tree purpose, value, chain;
1166 {
1167 tree t;
1168 int hashcode = 0;
1169
1170 if (! debug_no_list_hash)
1171 {
1172 hashcode = list_hash (purpose, value, chain);
1173 t = list_hash_lookup (hashcode, purpose, value, chain);
1174 if (t)
1175 return t;
1176 }
1177
1178 t = tree_cons (purpose, value, chain);
1179
1180 /* If this is a new list, record it for later reuse. */
1181 if (! debug_no_list_hash)
1182 list_hash_add (hashcode, t);
1183
1184 return t;
1185 }
1186
1187 /* Constructor for hashed lists. */
1188
1189 tree
1190 hash_tree_chain (value, chain)
1191 tree value, chain;
1192 {
1193 return hash_tree_cons (NULL_TREE, value, chain);
1194 }
1195
1196 /* Similar, but used for concatenating two lists. */
1197
1198 tree
1199 hash_chainon (list1, list2)
1200 tree list1, list2;
1201 {
1202 if (list2 == 0)
1203 return list1;
1204 if (list1 == 0)
1205 return list2;
1206 if (TREE_CHAIN (list1) == NULL_TREE)
1207 return hash_tree_chain (TREE_VALUE (list1), list2);
1208 return hash_tree_chain (TREE_VALUE (list1),
1209 hash_chainon (TREE_CHAIN (list1), list2));
1210 }
1211 \f
/* Build an association between TYPE and some parameters:

   OFFSET is the offset added to `this' to convert it to a pointer
   of type `TYPE *'

   BINFO is the base binfo to use, if we are deriving from one.  This
   is necessary, as we want specialized parent binfos from base
   classes, so that the VTABLE_NAMEs of bases are for the most derived
   type, instead of the simple type.

   VTABLE is the virtual function table with which to initialize
   sub-objects of type TYPE.

   VIRTUALS are the virtual functions sitting in VTABLE.

   Returns the new binfo, a TREE_VEC of length 7.  */

tree
make_binfo (offset, binfo, vtable, virtuals)
     tree offset, binfo;
     tree vtable, virtuals;
{
  tree new_binfo = make_tree_vec (7);
  tree type;

  /* BINFO may be given either as a binfo (a TREE_VEC) or as a type;
     in the latter case, look up the type's own binfo, if it has one.  */
  if (TREE_CODE (binfo) == TREE_VEC)
    type = BINFO_TYPE (binfo);
  else
    {
      type = binfo;
      binfo = CLASS_TYPE_P (type) ? TYPE_BINFO (binfo) : NULL_TREE;
    }

  TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
  BINFO_OFFSET (new_binfo) = offset;
  BINFO_VTABLE (new_binfo) = vtable;
  BINFO_VIRTUALS (new_binfo) = virtuals;
  BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;

  /* Share a copy of the original binfo's base vector, if any.  */
  if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
    BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
  return new_binfo;
}
1253
1254 /* Return the binfo value for ELEM in TYPE. */
1255
1256 tree
1257 binfo_value (elem, type)
1258 tree elem;
1259 tree type;
1260 {
1261 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1262 compiler_error ("base class `%s' ambiguous in binfo_value",
1263 TYPE_NAME_STRING (elem));
1264 if (elem == type)
1265 return TYPE_BINFO (type);
1266 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1267 return type;
1268 return get_binfo (elem, type, 0);
1269 }
1270
1271 /* Return a reversed copy of the BINFO-chain given by PATH. (If the
1272 BINFO_INHERITANCE_CHAIN points from base classes to derived
1273 classes, it will instead point from derived classes to base
1274 classes.) Returns the first node in the reversed chain. */
1275
1276 tree
1277 reverse_path (path)
1278 tree path;
1279 {
1280 register tree prev = NULL_TREE, cur;
1281 for (cur = path; cur; cur = BINFO_INHERITANCE_CHAIN (cur))
1282 {
1283 tree r = copy_node (cur);
1284 BINFO_INHERITANCE_CHAIN (r) = prev;
1285 prev = r;
1286 }
1287 return prev;
1288 }
1289
/* Dump the contents of the binfo ELEM to stderr, for use from a
   debugger: the type name and offset, the vtable decl (if any), and
   each virtual function together with its expected and actual vtable
   indices.  */

void
debug_binfo (elem)
     tree elem;
{
  unsigned HOST_WIDE_INT n;
  tree virtuals;

  fprintf (stderr, "type \"%s\"; offset = %ld\n",
	   TYPE_NAME_STRING (BINFO_TYPE (elem)),
	   (long) TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
  fprintf (stderr, "vtable type:\n");
  debug_tree (BINFO_TYPE (elem));
  if (BINFO_VTABLE (elem))
    fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
  else
    fprintf (stderr, "no vtable decl yet\n");
  fprintf (stderr, "virtuals:\n");
  virtuals = BINFO_VIRTUALS (elem);

  /* Skip past the RTTI-related entries at the head of the vtable;
     N is the index of the first real virtual function.  */
  n = skip_rtti_stuff (&virtuals, BINFO_TYPE (elem));

  while (virtuals)
    {
      tree fndecl = TREE_VALUE (virtuals);
      fprintf (stderr, "%s [%ld =? %ld]\n",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
	       (long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
      ++n;
      virtuals = TREE_CHAIN (virtuals);
    }
}
1321
1322 int
1323 count_functions (t)
1324 tree t;
1325 {
1326 int i;
1327 if (TREE_CODE (t) == FUNCTION_DECL)
1328 return 1;
1329 else if (TREE_CODE (t) == OVERLOAD)
1330 {
1331 for (i=0; t; t = OVL_CHAIN (t))
1332 i++;
1333 return i;
1334 }
1335
1336 my_friendly_abort (359);
1337 return 0;
1338 }
1339
1340 int
1341 is_overloaded_fn (x)
1342 tree x;
1343 {
1344 /* A baselink is also considered an overloaded function. */
1345 if (TREE_CODE (x) == OFFSET_REF)
1346 x = TREE_OPERAND (x, 1);
1347 if (BASELINK_P (x))
1348 x = TREE_VALUE (x);
1349 return (TREE_CODE (x) == FUNCTION_DECL
1350 || TREE_CODE (x) == TEMPLATE_ID_EXPR
1351 || DECL_FUNCTION_TEMPLATE_P (x)
1352 || TREE_CODE (x) == OVERLOAD);
1353 }
1354
1355 int
1356 really_overloaded_fn (x)
1357 tree x;
1358 {
1359 /* A baselink is also considered an overloaded function. */
1360 if (TREE_CODE (x) == OFFSET_REF)
1361 x = TREE_OPERAND (x, 1);
1362 if (BASELINK_P (x))
1363 x = TREE_VALUE (x);
1364 return (TREE_CODE (x) == OVERLOAD
1365 && (TREE_CHAIN (x) != NULL_TREE
1366 || DECL_FUNCTION_TEMPLATE_P (OVL_FUNCTION (x))));
1367 }
1368
1369 tree
1370 get_first_fn (from)
1371 tree from;
1372 {
1373 my_friendly_assert (is_overloaded_fn (from), 9);
1374 /* A baselink is also considered an overloaded function. */
1375 if (BASELINK_P (from))
1376 from = TREE_VALUE (from);
1377 return OVL_CURRENT (from);
1378 }
1379
1380 /* Returns nonzero if T is a ->* or .* expression that refers to a
1381 member function. */
1382
1383 int
1384 bound_pmf_p (t)
1385 tree t;
1386 {
1387 return (TREE_CODE (t) == OFFSET_REF
1388 && TYPE_PTRMEMFUNC_P (TREE_TYPE (TREE_OPERAND (t, 1))));
1389 }
1390
1391 /* Return a new OVL node, concatenating it with the old one. */
1392
1393 tree
1394 ovl_cons (decl, chain)
1395 tree decl;
1396 tree chain;
1397 {
1398 tree result = make_node (OVERLOAD);
1399 TREE_TYPE (result) = unknown_type_node;
1400 OVL_FUNCTION (result) = decl;
1401 TREE_CHAIN (result) = chain;
1402
1403 return result;
1404 }
1405
1406 /* Build a new overloaded function. If this is the first one,
1407 just return it; otherwise, ovl_cons the _DECLs */
1408
1409 tree
1410 build_overload (decl, chain)
1411 tree decl;
1412 tree chain;
1413 {
1414 if (! chain && TREE_CODE (decl) != TEMPLATE_DECL)
1415 return decl;
1416 if (chain && TREE_CODE (chain) != OVERLOAD)
1417 chain = ovl_cons (chain, NULL_TREE);
1418 return ovl_cons (decl, chain);
1419 }
1420
1421 /* True if fn is in ovl. */
1422
1423 int
1424 ovl_member (fn, ovl)
1425 tree fn;
1426 tree ovl;
1427 {
1428 if (ovl == NULL_TREE)
1429 return 0;
1430 if (TREE_CODE (ovl) != OVERLOAD)
1431 return ovl == fn;
1432 for (; ovl; ovl = OVL_CHAIN (ovl))
1433 if (OVL_FUNCTION (ovl) == fn)
1434 return 1;
1435 return 0;
1436 }
1437
1438 int
1439 is_aggr_type_2 (t1, t2)
1440 tree t1, t2;
1441 {
1442 if (TREE_CODE (t1) != TREE_CODE (t2))
1443 return 0;
1444 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1445 }
1446
/* Returns non-zero if CODE is the code for a statement.  This list
   must be kept in sync with the statement codes the front end
   creates; walk_tree and copy_tree_r rely on it to decide when to
   follow TREE_CHAIN.  */

static int
statement_code_p (code)
     enum tree_code code;
{
  switch (code)
    {
    case EXPR_STMT:
    case COMPOUND_STMT:
    case DECL_STMT:
    case IF_STMT:
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case RETURN_STMT:
    case BREAK_STMT:
    case CONTINUE_STMT:
    case SWITCH_STMT:
    case GOTO_STMT:
    case LABEL_STMT:
    case ASM_STMT:
    case SUBOBJECT:
    case CLEANUP_STMT:
    case START_CATCH_STMT:
    case CTOR_STMT:
    case SCOPE_STMT:
    case CTOR_INITIALIZER:
    case CASE_LABEL:
    case RETURN_INIT:
    case TRY_BLOCK:
    case HANDLER:
      return 1;

    default:
      return 0;
    }
}
1485 \f
#define PRINT_RING_SIZE 4

/* Return a printable name for DECL with verbosity V (as for
   lang_decl_name).  For functions, the result is cached in a small
   ring of heap-allocated strings, so the returned pointer stays valid
   across a few subsequent calls; for other decls it is whatever
   lang_decl_name returns.  */

const char *
lang_printable_name (decl, v)
     tree decl;
     int v;
{
  static tree decl_ring[PRINT_RING_SIZE];
  static char *print_ring[PRINT_RING_SIZE];
  static int ring_counter;
  int i;

  /* Only cache functions.  */
  if (v < 2
      || TREE_CODE (decl) != FUNCTION_DECL
      || DECL_LANG_SPECIFIC (decl) == 0)
    return lang_decl_name (decl, v);

  /* See if this print name is lying around.  */
  for (i = 0; i < PRINT_RING_SIZE; i++)
    if (decl_ring[i] == decl)
      /* yes, so return it.  */
      return print_ring[i];

  /* Not cached; pick the next ring slot to evict.  */
  if (++ring_counter == PRINT_RING_SIZE)
    ring_counter = 0;

  /* Never evict the slot holding the current function's name, since
     callers may still be holding that pointer.  */
  if (current_function_decl != NULL_TREE)
    {
      if (decl_ring[ring_counter] == current_function_decl)
	ring_counter += 1;
      if (ring_counter == PRINT_RING_SIZE)
	ring_counter = 0;
      if (decl_ring[ring_counter] == current_function_decl)
	my_friendly_abort (106);
    }

  /* Free the evicted slot's old string before overwriting it.  */
  if (print_ring[ring_counter])
    free (print_ring[ring_counter]);

  print_ring[ring_counter] = xstrdup (lang_decl_name (decl, v));
  decl_ring[ring_counter] = decl;
  return print_ring[ring_counter];
}
1530 \f
1531 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
1532 listed in RAISES. */
1533
1534 tree
1535 build_exception_variant (type, raises)
1536 tree type;
1537 tree raises;
1538 {
1539 tree v = TYPE_MAIN_VARIANT (type);
1540 int type_quals = TYPE_QUALS (type);
1541
1542 for (; v; v = TYPE_NEXT_VARIANT (v))
1543 if (TYPE_QUALS (v) == type_quals
1544 && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (v), 1))
1545 return v;
1546
1547 /* Need to build a new variant. */
1548 v = build_type_copy (type);
1549 TYPE_RAISES_EXCEPTIONS (v) = raises;
1550 return v;
1551 }
1552
1553 /* Given a TEMPLATE_TEMPLATE_PARM node T, create a new one together with its
1554 lang_specific field and its corresponding TEMPLATE_DECL node */
1555
1556 tree
1557 copy_template_template_parm (t)
1558 tree t;
1559 {
1560 tree template = TYPE_NAME (t);
1561 tree t2;
1562
1563 t2 = make_lang_type (TEMPLATE_TEMPLATE_PARM);
1564 template = copy_node (template);
1565 copy_lang_decl (template);
1566
1567 TREE_TYPE (template) = t2;
1568 TYPE_NAME (t2) = template;
1569 TYPE_STUB_DECL (t2) = template;
1570
1571 /* No need to copy these */
1572 TYPE_FIELDS (t2) = TYPE_FIELDS (t);
1573 TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t2)
1574 = TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t);
1575 return t2;
1576 }
1577
/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
   FUNC is called with the DATA and the address of each sub-tree.  If
   FUNC returns a non-NULL value, the traversal is aborted, and the
   value returned by FUNC is returned.  FUNC may clear its
   walk_subtrees argument to prevent recursion below a node.  Returns
   NULL_TREE when the whole tree was walked without FUNC signalling
   anything.  */

tree
walk_tree (tp, func, data)
     tree *tp;
     walk_tree_fn func;
     void *data;
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

/* Recurse into NODE; propagate a non-NULL result up immediately.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree (&(NODE), func, data);		\
      if (result)					\
	return result;					\
    }							\
  while (0)

  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    return NULL_TREE;

  code = TREE_CODE (*tp);

  /* Handle common cases up front.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || TREE_CODE_CLASS (code) == 'r'
      || TREE_CODE_CLASS (code) == 's')
    {
      int i;

      /* Walk over all the sub-trees of this operand.  */
      for (i = first_rtl_op (code) - 1; i >= 0; --i)
	WALK_SUBTREE (TREE_OPERAND (*tp, i));

      /* For statements, we also walk the chain so that we cover the
	 entire statement tree.  */
      if (statement_code_p (code))
	{
	  if (code == DECL_STMT
	      && DECL_STMT_DECL (*tp)
	      && TREE_CODE_CLASS (TREE_CODE (DECL_STMT_DECL (*tp))) == 'd')
	    {
	      /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
		 into declarations that are just mentioned, rather than
		 declared; they don't really belong to this part of the tree.
		 And, we can see cycles: the initializer for a declaration can
		 refer to the declaration itself.  */
	      WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
	      WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
	    }

	  WALK_SUBTREE (TREE_CHAIN (*tp));
	}

      /* We didn't find what we were looking for.  */
      return NULL_TREE;
    }
  else if (TREE_CODE_CLASS (code) == 'd')
    {
      /* For declarations seen outside a DECL_STMT, walk only their
	 type, not their initializer or size (see above).  */
      WALK_SUBTREE (TREE_TYPE (*tp));

      /* We didn't find what we were looking for.  */
      return NULL_TREE;
    }

  /* Not one of the easy cases.  We must explicitly go through the
     children.  */
  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
    case DEFAULT_ARG:
    case TEMPLATE_TEMPLATE_PARM:
    case TEMPLATE_PARM_INDEX:
    case TEMPLATE_TYPE_PARM:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VOID_TYPE:
    case BOOLEAN_TYPE:
    case TYPENAME_TYPE:
    case UNION_TYPE:
    case ENUMERAL_TYPE:
    case TYPEOF_TYPE:
    case BLOCK:
      /* None of these have subtrees other than those already walked
         above.  */
      break;

    case PTRMEM_CST:
      WALK_SUBTREE (TREE_TYPE (*tp));
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_PURPOSE (*tp));
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE (TREE_CHAIN (*tp));
      break;

    case OVERLOAD:
      WALK_SUBTREE (OVL_FUNCTION (*tp));
      WALK_SUBTREE (OVL_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);
	while (len--)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
      }
      break;

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE (TREE_IMAGPART (*tp));
      break;

    case CONSTRUCTOR:
      WALK_SUBTREE (CONSTRUCTOR_ELTS (*tp));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (TYPE_ARG_TYPES (*tp));
      break;

    case ARRAY_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (TYPE_DOMAIN (*tp));
      break;

    case INTEGER_TYPE:
      WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
      WALK_SUBTREE (TYPE_MAX_VALUE (*tp));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (*tp));
      break;

    case RECORD_TYPE:
      /* A pointer-to-member-function type is represented as a
	 RECORD_TYPE; walk the underlying function type.  */
      if (TYPE_PTRMEMFUNC_P (*tp))
	WALK_SUBTREE (TYPE_PTRMEMFUNC_FN_TYPE (*tp));
      break;

    default:
      my_friendly_abort (19990803);
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE
}
1764
1765 /* Passed to walk_tree. Checks for the use of types with no linkage. */
1766
1767 static tree
1768 no_linkage_helper (tp, walk_subtrees, data)
1769 tree *tp;
1770 int *walk_subtrees ATTRIBUTE_UNUSED;
1771 void *data ATTRIBUTE_UNUSED;
1772 {
1773 tree t = *tp;
1774
1775 if (TYPE_P (t)
1776 && (IS_AGGR_TYPE (t) || TREE_CODE (t) == ENUMERAL_TYPE)
1777 && (decl_function_context (TYPE_MAIN_DECL (t))
1778 || ANON_AGGRNAME_P (TYPE_IDENTIFIER (t))))
1779 return t;
1780 return NULL_TREE;
1781 }
1782
/* Check if the type T depends on a type with no linkage and if so, return
   it.  Returns NULL_TREE when no such type is found.  */

tree
no_linkage_check (t)
     tree t;
{
  /* There's no point in checking linkage on template functions; we
     can't know their complete types.  */
  if (processing_template_decl)
    return NULL_TREE;

  /* walk_tree returns the no-linkage type found by the helper, or
     NULL_TREE if none.  */
  t = walk_tree (&t, no_linkage_helper, NULL);
  /* NOTE(review): the comparison below filters out error_mark_node;
     walk_tree itself only returns what no_linkage_helper returns, so
     this presumably guards against T itself being erroneous — confirm
     against callers.  */
  if (t != error_mark_node)
    return t;
  return NULL_TREE;
}
1800
/* Passed to walk_tree.  Copies the node pointed to, if appropriate.
   Expressions, references, constants, statements, and a few list-like
   nodes are copied; types and other decls are left shared.  Restoring
   TREE_CHAIN on chained node kinds lets walk_tree continue down the
   copied chain.  Always returns NULL_TREE so the walk continues.  */

tree
copy_tree_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (*tp);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || TREE_CODE_CLASS (code) == 'r'
      || TREE_CODE_CLASS (code) == 'c'
      || TREE_CODE_CLASS (code) == 's'
      || code == PARM_DECL
      || code == TREE_LIST
      || code == TREE_VEC
      || code == OVERLOAD)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      *tp = copy_node (*tp);

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL || code == TREE_LIST || code == OVERLOAD
	  || statement_code_p (code))
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all scope-statements.  */
      if (TREE_CODE (*tp) == SCOPE_STMT)
	SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == TEMPLATE_TEMPLATE_PARM)
    /* These must be copied specially.  */
    *tp = copy_template_template_parm (*tp);
  else if (TREE_CODE_CLASS (code) == 't')
    /* There's no need to copy types, or anything beneath them.  */
    *walk_subtrees = 0;

  return NULL_TREE;
}
1848
#ifdef GATHER_STATISTICS
extern int depth_reached;
#endif

/* Print C++-front-end statistics (search and class statistics, plus
   the maximum template instantiation depth when the compiler was
   built with GATHER_STATISTICS) to stderr.  */

void
print_lang_statistics ()
{
  print_search_statistics ();
  print_class_statistics ();
#ifdef GATHER_STATISTICS
  fprintf (stderr, "maximum template instantiation depth reached: %d\n",
	   depth_reached);
#endif
}
1863
/* This is used by the `assert' macro.  It is provided in libgcc.a,
   which `cc' doesn't know how to link.  Note that the C++ front-end
   no longer actually uses the `assert' macro (instead, it calls
   my_friendly_assert).  But all of the back-end files still need this.  */

void
__eprintf (string, expression, line, filename)
     const char *string;
     const char *expression;
     unsigned line;
     const char *filename;
{
  /* STRING is deliberately used as the format: the assert macro
     passes a fixed format literal taking these three arguments.  */
  fprintf (stderr, string, expression, line, filename);
  fflush (stderr);
  abort ();
}
1880
1881 /* Return, as an INTEGER_CST node, the number of elements for TYPE
1882 (which is an ARRAY_TYPE). This counts only elements of the top
1883 array. */
1884
1885 tree
1886 array_type_nelts_top (type)
1887 tree type;
1888 {
1889 return fold (build (PLUS_EXPR, sizetype,
1890 array_type_nelts (type),
1891 integer_one_node));
1892 }
1893
1894 /* Return, as an INTEGER_CST node, the number of elements for TYPE
1895 (which is an ARRAY_TYPE). This one is a recursive count of all
1896 ARRAY_TYPEs that are clumped together. */
1897
1898 tree
1899 array_type_nelts_total (type)
1900 tree type;
1901 {
1902 tree sz = array_type_nelts_top (type);
1903 type = TREE_TYPE (type);
1904 while (TREE_CODE (type) == ARRAY_TYPE)
1905 {
1906 tree n = array_type_nelts_top (type);
1907 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1908 type = TREE_TYPE (type);
1909 }
1910 return sz;
1911 }
1912
/* Called from break_out_target_exprs via mapcar.  Passed to
   walk_tree: replaces each TARGET_EXPR with a fresh copy whose
   temporary VAR_DECL is new, recording the old->new variable mapping
   in DATA (a splay tree) so bot_replace can patch up later uses.
   Other nodes are simply copied via copy_tree_r.  */

static tree
bot_manip (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  splay_tree target_remap = ((splay_tree) data);
  tree t = *tp;

  if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
    {
      /* There can't be any TARGET_EXPRs below this point.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  else if (TREE_CODE (t) == TARGET_EXPR)
    {
      tree u;

      if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR)
	{
	  /* Mark the constructor being called as used, then rebuild
	     the TARGET_EXPR around a recursively-processed initializer.  */
	  mark_used (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 1), 0), 0));
	  u = build_cplus_new
	    (TREE_TYPE (t), break_out_target_exprs (TREE_OPERAND (t, 1)));
	}
      else
	{
	  /* Copy the node and give it a brand-new temporary variable.  */
	  u = copy_node (t);
	  TREE_OPERAND (u, 0) = build (VAR_DECL, TREE_TYPE (t));
	  layout_decl (TREE_OPERAND (u, 0), 0);
	}

      /* Map the old variable to the new one.  */
      splay_tree_insert (target_remap,
			 (splay_tree_key) TREE_OPERAND (t, 0),
			 (splay_tree_value) TREE_OPERAND (u, 0));

      /* Replace the old expression with the new version.  */
      *tp = u;
      /* We don't have to go below this point; the recursive call to
	 break_out_target_exprs will have handled anything below this
	 point.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  else if (TREE_CODE (t) == CALL_EXPR)
    /* Mark the called function as used.  */
    mark_used (TREE_OPERAND (TREE_OPERAND (t, 0), 0));

  /* Make a copy of this node.  */
  return copy_tree_r (tp, walk_subtrees, NULL);
}
1966
1967 /* Replace all remapped VAR_DECLs in T with their new equivalents.
1968 DATA is really a splay-tree mapping old variables to new
1969 variables. */
1970
1971 static tree
1972 bot_replace (t, walk_subtrees, data)
1973 tree *t;
1974 int *walk_subtrees ATTRIBUTE_UNUSED;
1975 void *data;
1976 {
1977 splay_tree target_remap = ((splay_tree) data);
1978
1979 if (TREE_CODE (*t) == VAR_DECL)
1980 {
1981 splay_tree_node n = splay_tree_lookup (target_remap,
1982 (splay_tree_key) *t);
1983 if (n)
1984 *t = (tree) n->value;
1985 }
1986
1987 return NULL_TREE;
1988 }
1989
/* When we parse a default argument expression, we may create
   temporary variables via TARGET_EXPRs.  When we actually use the
   default-argument expression, we make a copy of the expression, but
   we must replace the temporaries with appropriate local versions.
   Returns the rewritten copy of T.  */

tree
break_out_target_exprs (t)
     tree t;
{
  /* A counter rather than a bare flag because bot_manip re-enters
     this function for AGGR_INIT_EXPR initializers; the splay tree is
     created on the outermost entry and freed on the outermost exit.  */
  static int target_remap_count;
  static splay_tree target_remap;

  if (!target_remap_count++)
    target_remap = splay_tree_new (splay_tree_compare_pointers,
				   /*splay_tree_delete_key_fn=*/NULL,
				   /*splay_tree_delete_value_fn=*/NULL);
  /* First pass: copy the tree, rebuilding TARGET_EXPRs and recording
     old->new temporaries.  Second pass: rewrite remaining uses.  */
  walk_tree (&t, bot_manip, target_remap);
  walk_tree (&t, bot_replace, target_remap);

  if (!--target_remap_count)
    {
      splay_tree_delete (target_remap);
      target_remap = NULL;
    }

  return t;
}
2017
/* Obstack used for allocating nodes in template function and variable
   definitions.  */

/* Similar to `build_nt', except that we set TREE_COMPLEXITY to be the
   current line number.  CODE determines how many operand trees are
   read from the variable arguments.  */

tree
build_min_nt VPROTO((enum tree_code code, ...))
{
#ifndef ANSI_PROTOTYPES
  enum tree_code code;
#endif
  va_list p;
  register tree t;
  register int length;
  register int i;

  VA_START (p, code);

#ifndef ANSI_PROTOTYPES
  /* With traditional (non-prototype) varargs, the fixed argument must
     be re-fetched from the argument list.  */
  code = va_arg (p, enum tree_code);
#endif

  t = make_node (code);
  length = tree_code_length[(int) code];
  TREE_COMPLEXITY (t) = lineno;

  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
    }

  va_end (p);
  return t;
}
2054
/* Similar to `build', except we set TREE_COMPLEXITY to the current
   line-number.  TT becomes the TREE_TYPE of the new node; CODE
   determines how many operand trees are read from the variable
   arguments.  */

tree
build_min VPROTO((enum tree_code code, tree tt, ...))
{
#ifndef ANSI_PROTOTYPES
  enum tree_code code;
  tree tt;
#endif
  va_list p;
  register tree t;
  register int length;
  register int i;

  VA_START (p, tt);

#ifndef ANSI_PROTOTYPES
  /* With traditional (non-prototype) varargs, the fixed arguments
     must be re-fetched from the argument list.  */
  code = va_arg (p, enum tree_code);
  tt = va_arg (p, tree);
#endif

  t = make_node (code);
  length = tree_code_length[(int) code];
  TREE_TYPE (t) = tt;
  TREE_COMPLEXITY (t) = lineno;

  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
    }

  va_end (p);
  return t;
}
2091
2092 tree
2093 get_type_decl (t)
2094 tree t;
2095 {
2096 if (TREE_CODE (t) == TYPE_DECL)
2097 return t;
2098 if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
2099 return TYPE_STUB_DECL (t);
2100
2101 my_friendly_abort (42);
2102
2103 /* Stop compiler from complaining control reaches end of non-void function. */
2104 return 0;
2105 }
2106
2107 int
2108 can_free (obstack, t)
2109 struct obstack *obstack;
2110 tree t;
2111 {
2112 int size = 0;
2113
2114 if (TREE_CODE (t) == TREE_VEC)
2115 size = (TREE_VEC_LENGTH (t)-1) * sizeof (tree) + sizeof (struct tree_vec);
2116 else
2117 my_friendly_abort (42);
2118
2119 #define ROUND(x) ((x + obstack_alignment_mask (obstack)) \
2120 & ~ obstack_alignment_mask (obstack))
2121 if ((char *)t + ROUND (size) == obstack_next_free (obstack))
2122 return 1;
2123 #undef ROUND
2124
2125 return 0;
2126 }
2127
2128 /* Return first vector element whose BINFO_TYPE is ELEM.
2129 Return 0 if ELEM is not in VEC. VEC may be NULL_TREE. */
2130
2131 tree
2132 vec_binfo_member (elem, vec)
2133 tree elem, vec;
2134 {
2135 int i;
2136
2137 if (vec)
2138 for (i = 0; i < TREE_VEC_LENGTH (vec); ++i)
2139 if (same_type_p (elem, BINFO_TYPE (TREE_VEC_ELT (vec, i))))
2140 return TREE_VEC_ELT (vec, i);
2141
2142 return NULL_TREE;
2143 }
2144
2145 /* Kludge around the fact that DECL_CONTEXT for virtual functions returns
2146 the wrong thing for decl_function_context. Hopefully the uses in the
2147 backend won't matter, since we don't need a static chain for local class
2148 methods. FIXME! */
2149
2150 tree
2151 hack_decl_function_context (decl)
2152 tree decl;
2153 {
2154 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_FUNCTION_MEMBER_P (decl))
2155 return decl_function_context (TYPE_MAIN_DECL (DECL_CLASS_CONTEXT (decl)));
2156 return decl_function_context (decl);
2157 }
2158
2159 /* Returns the namespace that contains DECL, whether directly or
2160 indirectly. */
2161
2162 tree
2163 decl_namespace_context (decl)
2164 tree decl;
2165 {
2166 while (1)
2167 {
2168 if (TREE_CODE (decl) == NAMESPACE_DECL)
2169 return decl;
2170 else if (TYPE_P (decl))
2171 decl = CP_DECL_CONTEXT (TYPE_MAIN_DECL (decl));
2172 else
2173 decl = CP_DECL_CONTEXT (decl);
2174 }
2175 }
2176
2177 /* Return truthvalue of whether T1 is the same tree structure as T2.
2178 Return 1 if they are the same.
2179 Return 0 if they are understandably different.
2180 Return -1 if either contains tree structure not understood by
2181 this function. */
2182
2183 int
2184 cp_tree_equal (t1, t2)
2185 tree t1, t2;
2186 {
2187 register enum tree_code code1, code2;
2188 int cmp;
2189
2190 if (t1 == t2)
2191 return 1;
2192 if (t1 == 0 || t2 == 0)
2193 return 0;
2194
2195 code1 = TREE_CODE (t1);
2196 code2 = TREE_CODE (t2);
2197
2198 if (code1 == NOP_EXPR || code1 == CONVERT_EXPR || code1 == NON_LVALUE_EXPR)
2199 {
2200 if (code2 == NOP_EXPR || code2 == CONVERT_EXPR || code2 == NON_LVALUE_EXPR)
2201 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2202 else
2203 return cp_tree_equal (TREE_OPERAND (t1, 0), t2);
2204 }
2205 else if (code2 == NOP_EXPR || code2 == CONVERT_EXPR
2206 || code2 == NON_LVALUE_EXPR)
2207 return cp_tree_equal (t1, TREE_OPERAND (t2, 0));
2208
2209 if (code1 != code2)
2210 return 0;
2211
2212 switch (code1)
2213 {
2214 case INTEGER_CST:
2215 return TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
2216 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2);
2217
2218 case REAL_CST:
2219 return REAL_VALUES_EQUAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
2220
2221 case STRING_CST:
2222 return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
2223 && !bcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
2224 TREE_STRING_LENGTH (t1));
2225
2226 case CONSTRUCTOR:
2227 /* We need to do this when determining whether or not two
2228 non-type pointer to member function template arguments
2229 are the same. */
2230 if (!(same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))
2231 /* The first operand is RTL. */
2232 && TREE_OPERAND (t1, 0) == TREE_OPERAND (t2, 0)))
2233 return 0;
2234 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2235
2236 case TREE_LIST:
2237 cmp = cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
2238 if (cmp <= 0)
2239 return cmp;
2240 cmp = cp_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2));
2241 if (cmp <= 0)
2242 return cmp;
2243 return cp_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2));
2244
2245 case SAVE_EXPR:
2246 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2247
2248 case CALL_EXPR:
2249 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2250 if (cmp <= 0)
2251 return cmp;
2252 return simple_cst_list_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2253
2254 case TARGET_EXPR:
2255 /* Special case: if either target is an unallocated VAR_DECL,
2256 it means that it's going to be unified with whatever the
2257 TARGET_EXPR is really supposed to initialize, so treat it
2258 as being equivalent to anything. */
2259 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
2260 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
2261 && DECL_RTL (TREE_OPERAND (t1, 0)) == 0)
2262 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
2263 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
2264 && DECL_RTL (TREE_OPERAND (t2, 0)) == 0))
2265 cmp = 1;
2266 else
2267 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2268 if (cmp <= 0)
2269 return cmp;
2270 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2271
2272 case WITH_CLEANUP_EXPR:
2273 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2274 if (cmp <= 0)
2275 return cmp;
2276 return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t1, 2));
2277
2278 case COMPONENT_REF:
2279 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
2280 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2281 return 0;
2282
2283 case VAR_DECL:
2284 case PARM_DECL:
2285 case CONST_DECL:
2286 case FUNCTION_DECL:
2287 return 0;
2288
2289 case TEMPLATE_PARM_INDEX:
2290 return TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2)
2291 && TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2);
2292
2293 case SIZEOF_EXPR:
2294 case ALIGNOF_EXPR:
2295 if (TREE_CODE (TREE_OPERAND (t1, 0)) != TREE_CODE (TREE_OPERAND (t2, 0)))
2296 return 0;
2297 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t1, 0))) == 't')
2298 return same_type_p (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2299 break;
2300
2301 case PTRMEM_CST:
2302 /* Two pointer-to-members are the same if they point to the same
2303 field or function in the same class. */
2304 return (PTRMEM_CST_MEMBER (t1) == PTRMEM_CST_MEMBER (t2)
2305 && same_type_p (PTRMEM_CST_CLASS (t1), PTRMEM_CST_CLASS (t2)));
2306
2307 default:
2308 break;
2309 }
2310
2311 switch (TREE_CODE_CLASS (code1))
2312 {
2313 int i;
2314 case '1':
2315 case '2':
2316 case '<':
2317 case 'e':
2318 case 'r':
2319 case 's':
2320 cmp = 1;
2321 for (i=0; i<tree_code_length[(int) code1]; ++i)
2322 {
2323 cmp = cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
2324 if (cmp <= 0)
2325 return cmp;
2326 }
2327 return cmp;
2328 }
2329
2330 return -1;
2331 }
2332
2333 /* Build a wrapper around some pointer PTR so we can use it as a tree. */
2334
2335 tree
2336 build_ptr_wrapper (ptr)
2337 void *ptr;
2338 {
2339 tree t = make_node (WRAPPER);
2340 WRAPPER_PTR (t) = ptr;
2341 return t;
2342 }
2343
2344 /* Same, but on the expression_obstack. */
2345
2346 tree
2347 build_expr_ptr_wrapper (ptr)
2348 void *ptr;
2349 {
2350 return build_ptr_wrapper (ptr);
2351 }
2352
2353 /* Build a wrapper around some integer I so we can use it as a tree. */
2354
2355 tree
2356 build_int_wrapper (i)
2357 int i;
2358 {
2359 tree t = make_node (WRAPPER);
2360 WRAPPER_INT (t) = i;
2361 return t;
2362 }
2363
2364 static tree
2365 build_srcloc (file, line)
2366 char *file;
2367 int line;
2368 {
2369 tree t;
2370
2371 t = make_node (SRCLOC);
2372 SRCLOC_FILE (t) = file;
2373 SRCLOC_LINE (t) = line;
2374
2375 return t;
2376 }
2377
2378 tree
2379 build_srcloc_here ()
2380 {
2381 return build_srcloc (input_filename, lineno);
2382 }
2383
2384 /* The type of ARG when used as an lvalue. */
2385
2386 tree
2387 lvalue_type (arg)
2388 tree arg;
2389 {
2390 tree type = TREE_TYPE (arg);
2391 if (TREE_CODE (arg) == OVERLOAD)
2392 type = unknown_type_node;
2393 return type;
2394 }
2395
2396 /* The type of ARG for printing error messages; denote lvalues with
2397 reference types. */
2398
2399 tree
2400 error_type (arg)
2401 tree arg;
2402 {
2403 tree type = TREE_TYPE (arg);
2404 if (TREE_CODE (type) == ARRAY_TYPE)
2405 ;
2406 else if (real_lvalue_p (arg))
2407 type = build_reference_type (lvalue_type (arg));
2408 else if (IS_AGGR_TYPE (type))
2409 type = lvalue_type (arg);
2410
2411 return type;
2412 }
2413
2414 /* Does FUNCTION use a variable-length argument list? */
2415
2416 int
2417 varargs_function_p (function)
2418 tree function;
2419 {
2420 tree parm = TYPE_ARG_TYPES (TREE_TYPE (function));
2421 for (; parm; parm = TREE_CHAIN (parm))
2422 if (TREE_VALUE (parm) == void_type_node)
2423 return 0;
2424 return 1;
2425 }
2426
2427 /* Returns 1 if decl is a member of a class. */
2428
2429 int
2430 member_p (decl)
2431 tree decl;
2432 {
2433 tree ctx = DECL_CONTEXT (decl);
2434 return (ctx && TREE_CODE_CLASS (TREE_CODE (ctx)) == 't');
2435 }
2436
2437 /* Create a placeholder for member access where we don't actually have an
2438 object that the access is against. */
2439
2440 tree
2441 build_dummy_object (type)
2442 tree type;
2443 {
2444 tree decl = build1 (NOP_EXPR, build_pointer_type (type), void_zero_node);
2445 return build_indirect_ref (decl, NULL_PTR);
2446 }
2447
2448 /* We've gotten a reference to a member of TYPE. Return *this if appropriate,
2449 or a dummy object otherwise. If BINFOP is non-0, it is filled with the
2450 binfo path from current_class_type to TYPE, or 0. */
2451
2452 tree
2453 maybe_dummy_object (type, binfop)
2454 tree type;
2455 tree *binfop;
2456 {
2457 tree decl, context;
2458
2459 if (current_class_type
2460 && get_base_distance (type, current_class_type, 0, binfop) != -1)
2461 context = current_class_type;
2462 else
2463 {
2464 /* Reference from a nested class member function. */
2465 context = type;
2466 if (binfop)
2467 *binfop = TYPE_BINFO (type);
2468 }
2469
2470 if (current_class_ref && context == current_class_type)
2471 decl = current_class_ref;
2472 else
2473 decl = build_dummy_object (context);
2474
2475 return decl;
2476 }
2477
2478 /* Returns 1 if OB is a placeholder object, or a pointer to one. */
2479
2480 int
2481 is_dummy_object (ob)
2482 tree ob;
2483 {
2484 if (TREE_CODE (ob) == INDIRECT_REF)
2485 ob = TREE_OPERAND (ob, 0);
2486 return (TREE_CODE (ob) == NOP_EXPR
2487 && TREE_OPERAND (ob, 0) == void_zero_node);
2488 }
2489
2490 /* Returns 1 iff type T is a POD type, as defined in [basic.types]. */
2491
2492 int
2493 pod_type_p (t)
2494 tree t;
2495 {
2496 while (TREE_CODE (t) == ARRAY_TYPE)
2497 t = TREE_TYPE (t);
2498
2499 if (INTEGRAL_TYPE_P (t))
2500 return 1; /* integral, character or enumeral type */
2501 if (FLOAT_TYPE_P (t))
2502 return 1;
2503 if (TYPE_PTR_P (t))
2504 return 1; /* pointer to non-member */
2505 if (TYPE_PTRMEM_P (t))
2506 return 1; /* pointer to member object */
2507 if (TYPE_PTRMEMFUNC_P (t))
2508 return 1; /* pointer to member function */
2509
2510 if (! CLASS_TYPE_P (t))
2511 return 0; /* other non-class type (reference or function) */
2512 if (CLASSTYPE_NON_POD_P (t))
2513 return 0;
2514 return 1;
2515 }
2516
2517 /* Return a 1 if ATTR_NAME and ATTR_ARGS denote a valid C++-specific
2518 attribute for either declaration DECL or type TYPE and 0 otherwise.
2519 Plugged into valid_lang_attribute. */
2520
int
cp_valid_lang_attribute (attr_name, attr_args, decl, type)
     tree attr_name;
     tree attr_args ATTRIBUTE_UNUSED;
     tree decl ATTRIBUTE_UNUSED;
     tree type ATTRIBUTE_UNUSED;
{
  /* NOTE(review): attr_args, decl and type are marked ATTRIBUTE_UNUSED
     yet all three are read below; presumably the markers date from an
     earlier version of this function -- confirm before removing.  */
  if (is_attribute_p ("com_interface", attr_name))
    {
      /* `com_interface' depends on the thunk-based vtable layout.  */
      if (! flag_vtable_thunks)
	{
	  error ("`com_interface' only supported with -fvtable-thunks");
	  return 0;
	}

      /* The attribute takes no arguments and applies only to the main
	 variant of a class type, never to a declaration.  */
      if (attr_args != NULL_TREE
	  || decl != NULL_TREE
	  || ! CLASS_TYPE_P (type)
	  || type != TYPE_MAIN_VARIANT (type))
	{
	  warning ("`com_interface' attribute can only be applied to class definitions");
	  return 0;
	}

      CLASSTYPE_COM_INTERFACE (type) = 1;
      return 1;
    }
  else if (is_attribute_p ("init_priority", attr_name))
    {
      /* The single argument must fold to an integer constant.  */
      tree initp_expr = (attr_args ? TREE_VALUE (attr_args): NULL_TREE);
      int pri;

      if (initp_expr)
	STRIP_NOPS (initp_expr);

      if (!initp_expr || TREE_CODE (initp_expr) != INTEGER_CST)
	{
	  error ("requested init_priority is not an integer constant");
	  return 0;
	}

      pri = TREE_INT_CST_LOW (initp_expr);

      /* For an array, the remaining checks apply to the element type.  */
      while (TREE_CODE (type) == ARRAY_TYPE)
	type = TREE_TYPE (type);

      /* Only a file-scope static (non-extern) variable of class type
	 may carry an init_priority.  */
      if (decl == NULL_TREE
	  || TREE_CODE (decl) != VAR_DECL
	  || ! TREE_STATIC (decl)
	  || DECL_EXTERNAL (decl)
	  || (TREE_CODE (type) != RECORD_TYPE
	      && TREE_CODE (type) != UNION_TYPE)
	  /* Static objects in functions are initialized the
	     first time control passes through that
	     function. This is not precise enough to pin down an
	     init_priority value, so don't allow it. */
	  || current_function_decl)
	{
	  error ("can only use init_priority attribute on file-scope definitions of objects of class type");
	  return 0;
	}

      if (pri > MAX_INIT_PRIORITY || pri <= 0)
	{
	  error ("requested init_priority is out of range");
	  return 0;
	}

      /* Check for init_priorities that are reserved for
	 language and runtime support implementations.  Reserved
	 values are accepted with a warning, not rejected.  */
      if (pri <= MAX_RESERVED_INIT_PRIORITY)
	{
	  warning
	    ("requested init_priority is reserved for internal use");
	}

      DECL_INIT_PRIORITY (decl) = pri;
      return 1;
    }

  return 0;
}
2603
2604 /* Return a new PTRMEM_CST of the indicated TYPE. The MEMBER is the
2605 thing pointed to by the constant. */
2606
tree
make_ptrmem_cst (type, member)
     tree type;
     tree member;
{
  tree ptrmem_cst = make_node (PTRMEM_CST);
  /* It would seem a great convenience if make_node would set
     TREE_CONSTANT for things of class `c', but it does not.  */
  TREE_CONSTANT (ptrmem_cst) = 1;
  TREE_TYPE (ptrmem_cst) = type;
  PTRMEM_CST_MEMBER (ptrmem_cst) = member;
  return ptrmem_cst;
}
2620
2621 /* Mark ARG (which is really a list_hash_table **) for GC. */
2622
2623 static void
2624 mark_list_hash (arg)
2625 void *arg;
2626 {
2627 struct list_hash *lh;
2628
2629 for (lh = * ((struct list_hash **) arg); lh; lh = lh->next)
2630 ggc_mark_tree (lh->list);
2631 }
2632
2633 /* Initialize tree.c. */
2634
void
init_tree ()
{
  /* Install the C++-specific hooks used by the language-independent
     tree code.  */
  make_lang_type_fn = cp_make_lang_type;
  lang_unsave = cp_unsave;
  /* Register every bucket of the list hash table as a GC root;
     mark_list_hash walks a bucket's chain and marks each cached
     TREE_LIST.  */
  ggc_add_root (list_hash_table,
		sizeof (list_hash_table) / sizeof (struct list_hash *),
		sizeof (struct list_hash *),
		mark_list_hash);
}
2645
2646 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2647 information indicating to what new SAVE_EXPR this one should be
2648 mapped, use that one. Otherwise, create a new node and enter it in
2649 ST. FN is the function into which the copy will be placed. */
2650
2651 void
2652 remap_save_expr (tp, st, fn)
2653 tree *tp;
2654 splay_tree st;
2655 tree fn;
2656 {
2657 splay_tree_node n;
2658
2659 /* See if we already encountered this SAVE_EXPR. */
2660 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2661
2662 /* If we didn't already remap this SAVE_EXPR, do so now. */
2663 if (!n)
2664 {
2665 tree t = copy_node (*tp);
2666
2667 /* The SAVE_EXPR is now part of the function into which we
2668 are inlining this body. */
2669 SAVE_EXPR_CONTEXT (t) = fn;
2670 /* And we haven't evaluated it yet. */
2671 SAVE_EXPR_RTL (t) = NULL_RTX;
2672 /* Remember this SAVE_EXPR. */
2673 n = splay_tree_insert (st,
2674 (splay_tree_key) *tp,
2675 (splay_tree_value) t);
2676 }
2677
2678 /* Replace this SAVE_EXPR with the copy. */
2679 *tp = (tree) n->value;
2680 }
2681
2682 /* Called via walk_tree. If *TP points to a DECL_STMT for a local
2683 declaration, copies the declaration and enters it in the splay_tree
2684 pointed to by DATA (which is really a `splay_tree *'). */
2685
2686 static tree
2687 mark_local_for_remap_r (tp, walk_subtrees, data)
2688 tree *tp;
2689 int *walk_subtrees ATTRIBUTE_UNUSED;
2690 void *data;
2691 {
2692 tree t = *tp;
2693 splay_tree st = (splay_tree) data;
2694
2695 if ((TREE_CODE (t) == DECL_STMT
2696 && nonstatic_local_decl_p (DECL_STMT_DECL (t)))
2697 || TREE_CODE (t) == LABEL_STMT)
2698 {
2699 tree decl;
2700 tree copy;
2701
2702 /* Figure out what's being declared. */
2703 decl = (TREE_CODE (t) == DECL_STMT
2704 ? DECL_STMT_DECL (t) : LABEL_STMT_LABEL (t));
2705
2706 /* Make a copy. */
2707 copy = copy_decl_for_inlining (decl,
2708 DECL_CONTEXT (decl),
2709 DECL_CONTEXT (decl));
2710
2711 /* Remember the copy. */
2712 splay_tree_insert (st,
2713 (splay_tree_key) decl,
2714 (splay_tree_value) copy);
2715 }
2716
2717 return NULL_TREE;
2718 }
2719
2720 /* Called via walk_tree when an expression is unsaved. Using the
2721 splay_tree pointed to by ST (which is really a `splay_tree *'),
2722 remaps all local declarations to appropriate replacements. */
2723
2724 static tree
2725 cp_unsave_r (tp, walk_subtrees, data)
2726 tree *tp;
2727 int *walk_subtrees;
2728 void *data;
2729 {
2730 splay_tree st = (splay_tree) data;
2731 splay_tree_node n;
2732
2733 /* Only a local declaration (variable or label). */
2734 if (nonstatic_local_decl_p (*tp))
2735 {
2736 /* Lookup the declaration. */
2737 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2738
2739 /* If it's there, remap it. */
2740 if (n)
2741 *tp = (tree) n->value;
2742 }
2743 else if (TREE_CODE (*tp) == SAVE_EXPR)
2744 remap_save_expr (tp, st, current_function_decl);
2745 else
2746 {
2747 copy_tree_r (tp, walk_subtrees, NULL);
2748
2749 /* Do whatever unsaving is required. */
2750 unsave_expr_1 (*tp);
2751 }
2752
2753 /* Keep iterating. */
2754 return NULL_TREE;
2755 }
2756
2757 /* Called by unsave_expr_now whenever an expression (*TP) needs to be
2758 unsaved. */
2759
2760 static void
2761 cp_unsave (tp)
2762 tree *tp;
2763 {
2764 splay_tree st;
2765
2766 /* Create a splay-tree to map old local variable declarations to new
2767 ones. */
2768 st = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2769
2770 /* Walk the tree once figuring out what needs to be remapped. */
2771 walk_tree (tp, mark_local_for_remap_r, st);
2772
2773 /* Walk the tree again, copying, remapping, and unsaving. */
2774 walk_tree (tp, cp_unsave_r, st);
2775
2776 /* Clean up. */
2777 splay_tree_delete (st);
2778 }