[gcc.git] / gcc / cp / tree.c
1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "obstack.h"
25 #include "tree.h"
26 #include "cp-tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29 #include "toplev.h"
30 #include "ggc.h"
31 #include "insn-config.h"
32 #include "integrate.h"
33
34 static tree bot_manip PROTO((tree *, int *, void *));
35 static tree bot_replace PROTO((tree *, int *, void *));
36 static tree build_cplus_array_type_1 PROTO((tree, tree));
37 static void list_hash_add PROTO((int, tree));
38 static int list_hash PROTO((tree, tree, tree));
39 static tree list_hash_lookup PROTO((int, tree, tree, tree));
40 static void propagate_binfo_offsets PROTO((tree, tree));
41 static int avoid_overlap PROTO((tree, tree));
42 static cp_lvalue_kind lvalue_p_1 PROTO((tree, int));
43 static tree no_linkage_helper PROTO((tree *, int *, void *));
44 static tree build_srcloc PROTO((char *, int));
45 static void mark_list_hash PROTO ((void *));
46 static int statement_code_p PROTO((enum tree_code));
47 static tree mark_local_for_remap_r PROTO((tree *, int *, void *));
48 static tree cp_unsave_r PROTO ((tree *, int *, void *));
49 static void cp_unsave PROTO((tree *));
50 static tree build_target_expr PROTO((tree, tree));
51
52 #define CEIL(x,y) (((x) + (y) - 1) / (y))
53
54 /* If REF is an lvalue, returns the kind of lvalue that REF is.
55 Otherwise, returns clk_none. If TREAT_CLASS_RVALUES_AS_LVALUES is
56 non-zero, rvalues of class type are considered lvalues. */
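
/* A few illustrative C++ cases (editorial examples, not exhaustive),
   assuming `struct S { int b : 3; } s;' and a function `S f ();':

     int i;  ...  `i'   yields clk_ordinary
     s.b               yields clk_bitfield (the ordinary bit is cleared)
     f ()              yields clk_class when TREAT_CLASS_RVALUES_AS_LVALUES
                       is non-zero, and clk_none otherwise
     i + 1             yields clk_none (not an lvalue at all).  */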
57
58 static cp_lvalue_kind
59 lvalue_p_1 (ref, treat_class_rvalues_as_lvalues)
60 tree ref;
61 int treat_class_rvalues_as_lvalues;
62 {
63 cp_lvalue_kind op1_lvalue_kind = clk_none;
64 cp_lvalue_kind op2_lvalue_kind = clk_none;
65
66 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
67 return clk_ordinary;
68
69 if (ref == current_class_ptr && flag_this_is_variable <= 0)
70 return clk_none;
71
72 switch (TREE_CODE (ref))
73 {
74 /* preincrements and predecrements are valid lvals, provided
75 what they refer to are valid lvals. */
76 case PREINCREMENT_EXPR:
77 case PREDECREMENT_EXPR:
78 case SAVE_EXPR:
79 case UNSAVE_EXPR:
80 case TRY_CATCH_EXPR:
81 case WITH_CLEANUP_EXPR:
82 case REALPART_EXPR:
83 case IMAGPART_EXPR:
84 case NOP_EXPR:
85 return lvalue_p_1 (TREE_OPERAND (ref, 0),
86 treat_class_rvalues_as_lvalues);
87
88 case COMPONENT_REF:
89 op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
90 treat_class_rvalues_as_lvalues);
91 if (op1_lvalue_kind
92 /* The "field" can be a FUNCTION_DECL or an OVERLOAD in some
93 situations. */
94 && TREE_CODE (TREE_OPERAND (ref, 1)) == FIELD_DECL
95 && DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1)))
96 {
97 /* Clear the ordinary bit. If this object was a class
98 rvalue we want to preserve that information. */
99 op1_lvalue_kind &= ~clk_ordinary;
100 	      /* The lvalue is for a bitfield.  */
101 op1_lvalue_kind |= clk_bitfield;
102 }
103 return op1_lvalue_kind;
104
105 case STRING_CST:
106 return clk_ordinary;
107
108 case VAR_DECL:
109 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
110 && DECL_LANG_SPECIFIC (ref)
111 && DECL_IN_AGGR_P (ref))
112 return clk_none;
113 case INDIRECT_REF:
114 case ARRAY_REF:
115 case PARM_DECL:
116 case RESULT_DECL:
117 if (TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
118 return clk_ordinary;
119 break;
120
121 /* A currently unresolved scope ref. */
122 case SCOPE_REF:
123 my_friendly_abort (103);
124 case OFFSET_REF:
125 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
126 return clk_ordinary;
127 /* Fall through. */
128 case MAX_EXPR:
129 case MIN_EXPR:
130 op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
131 treat_class_rvalues_as_lvalues);
132 op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
133 treat_class_rvalues_as_lvalues);
134 break;
135
136 case COND_EXPR:
137 op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
138 treat_class_rvalues_as_lvalues);
139 op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 2),
140 treat_class_rvalues_as_lvalues);
141 break;
142
143 case MODIFY_EXPR:
144 return clk_ordinary;
145
146 case COMPOUND_EXPR:
147 return lvalue_p_1 (TREE_OPERAND (ref, 1),
148 treat_class_rvalues_as_lvalues);
149
150 case TARGET_EXPR:
151 return treat_class_rvalues_as_lvalues ? clk_class : clk_none;
152
153 case CALL_EXPR:
154 case VA_ARG_EXPR:
155 return ((treat_class_rvalues_as_lvalues
156 && IS_AGGR_TYPE (TREE_TYPE (ref)))
157 ? clk_class : clk_none);
158
159 case FUNCTION_DECL:
160 /* All functions (except non-static-member functions) are
161 lvalues. */
162 return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref)
163 ? clk_none : clk_ordinary);
164
165 default:
166 break;
167 }
168
169 /* If one operand is not an lvalue at all, then this expression is
170 not an lvalue. */
171 if (!op1_lvalue_kind || !op2_lvalue_kind)
172 return clk_none;
173
174 /* Otherwise, it's an lvalue, and it has all the odd properties
175 contributed by either operand. */
176 op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind;
177 /* It's not an ordinary lvalue if it involves either a bit-field or
178 a class rvalue. */
179 if ((op1_lvalue_kind & ~clk_ordinary) != clk_none)
180 op1_lvalue_kind &= ~clk_ordinary;
181 return op1_lvalue_kind;
182 }
183
184 /* If REF is an lvalue, returns the kind of lvalue that REF is.
185 Otherwise, returns clk_none. Lvalues can be assigned, unless they
186 have TREE_READONLY, or unless they are FUNCTION_DECLs. Lvalues can
187 have their address taken, unless they have DECL_REGISTER. */
188
189 cp_lvalue_kind
190 real_lvalue_p (ref)
191 tree ref;
192 {
193 return lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/0);
194 }
195
196 /* This differs from real_lvalue_p in that class rvalues are
197 considered lvalues. */
198
199 int
200 lvalue_p (ref)
201 tree ref;
202 {
203 return
204 (lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/1) != clk_none);
205 }
206
207 /* Return nonzero if REF is an lvalue valid for this language;
208 otherwise, print an error message and return zero. */
209
210 int
211 lvalue_or_else (ref, string)
212 tree ref;
213 const char *string;
214 {
215 int win = lvalue_p (ref);
216 if (! win)
217 error ("non-lvalue in %s", string);
218 return win;
219 }
220
221 /* Build a TARGET_EXPR, initializing the DECL with the VALUE. */
222
223 static tree
224 build_target_expr (decl, value)
225 tree decl;
226 tree value;
227 {
228 tree t;
229
230 t = build (TARGET_EXPR, TREE_TYPE (decl), decl, value,
231 maybe_build_cleanup (decl), NULL_TREE);
232 /* We always set TREE_SIDE_EFFECTS so that expand_expr does not
233 ignore the TARGET_EXPR. If there really turn out to be no
234 side-effects, then the optimizer should be able to get rid of
235 whatever code is generated anyhow. */
236 TREE_SIDE_EFFECTS (t) = 1;
237
238 return t;
239 }
240
241 /* INIT is a CALL_EXPR which needs info about its target.
242 TYPE is the type that this initialization should appear to have.
243
244 Build an encapsulation of the initialization to perform
245 and return it so that it can be processed by language-independent
246 and language-specific expression expanders. */
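
/* Editorial sketch of the transformation: for a call `f ()' whose class
   return value is used to initialize an object, the original

     CALL_EXPR <f, args>

   becomes, roughly,

     TARGET_EXPR <slot, AGGR_INIT_EXPR <f, args, slot>, cleanup>

   where SLOT is the artificial VAR_DECL built below, so later passes
   know which object the call constructs.  */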
247
248 tree
249 build_cplus_new (type, init)
250 tree type;
251 tree init;
252 {
253 tree fn;
254 tree slot;
255 tree rval;
256
257 /* Make sure that we're not trying to create an instance of an
258 abstract class. */
259 abstract_virtuals_error (NULL_TREE, type);
260
261 if (TREE_CODE (init) != CALL_EXPR && TREE_CODE (init) != AGGR_INIT_EXPR)
262 return convert (type, init);
263
264 slot = build (VAR_DECL, type);
265 DECL_ARTIFICIAL (slot) = 1;
266 DECL_CONTEXT (slot) = current_function_decl;
267 layout_decl (slot, 0);
268
269 /* We split the CALL_EXPR into its function and its arguments here.
270 Then, in expand_expr, we put them back together. The reason for
271 this is that this expression might be a default argument
272 expression. In that case, we need a new temporary every time the
273 expression is used. That's what break_out_target_exprs does; it
274 replaces every AGGR_INIT_EXPR with a copy that uses a fresh
275 temporary slot. Then, expand_expr builds up a call-expression
276 using the new slot. */
277 fn = TREE_OPERAND (init, 0);
278 rval = build (AGGR_INIT_EXPR, type, fn, TREE_OPERAND (init, 1), slot);
279 TREE_SIDE_EFFECTS (rval) = 1;
280 AGGR_INIT_VIA_CTOR_P (rval)
281 = (TREE_CODE (fn) == ADDR_EXPR
282 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
283 && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0)));
284 rval = build_target_expr (slot, rval);
285
286 return rval;
287 }
288
289 /* Build a TARGET_EXPR using INIT to initialize a new temporary of the
290 indicated TYPE. */
291
292 tree
293 build_target_expr_with_type (init, type)
294 tree init;
295 tree type;
296 {
297 tree slot;
298 tree rval;
299
300 slot = build (VAR_DECL, type);
301 DECL_ARTIFICIAL (slot) = 1;
302 DECL_CONTEXT (slot) = current_function_decl;
303 layout_decl (slot, 0);
304 rval = build_target_expr (slot, init);
305
306 return rval;
307 }
308
309 /* Like build_target_expr_with_type, but use the type of INIT. */
310
311 tree
312 get_target_expr (init)
313 tree init;
314 {
315 return build_target_expr_with_type (init, TREE_TYPE (init));
316 }
317
318 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
319 these CALL_EXPRs with tree nodes that will perform the cleanups. */
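
/* Editorial example: given

     struct S { ~S (); };
     S f ();

   a bare call `f ()' has class type with a destructor, so it is wrapped
   via build_cplus_new in a TARGET_EXPR whose cleanup destroys the
   temporary return value.  */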
320
321 tree
322 break_out_cleanups (exp)
323 tree exp;
324 {
325 tree tmp = exp;
326
327 if (TREE_CODE (tmp) == CALL_EXPR
328 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
329 return build_cplus_new (TREE_TYPE (tmp), tmp);
330
331 while (TREE_CODE (tmp) == NOP_EXPR
332 || TREE_CODE (tmp) == CONVERT_EXPR
333 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
334 {
335 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
336 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
337 {
338 TREE_OPERAND (tmp, 0)
339 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
340 TREE_OPERAND (tmp, 0));
341 break;
342 }
343 else
344 tmp = TREE_OPERAND (tmp, 0);
345 }
346 return exp;
347 }
348
349 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
350    copies where they are found.  Returns a deep copy of all nodes transitively
351 containing CALL_EXPRs. */
352
353 tree
354 break_out_calls (exp)
355 tree exp;
356 {
357 register tree t1, t2 = NULL_TREE;
358 register enum tree_code code;
359 register int changed = 0;
360 register int i;
361
362 if (exp == NULL_TREE)
363 return exp;
364
365 code = TREE_CODE (exp);
366
367 if (code == CALL_EXPR)
368 return copy_node (exp);
369
370   /* Don't try to defeat a save_expr, as it should only be done once.  */
371 if (code == SAVE_EXPR)
372 return exp;
373
374 switch (TREE_CODE_CLASS (code))
375 {
376 default:
377 abort ();
378
379 case 'c': /* a constant */
380 case 't': /* a type node */
381 case 'x': /* something random, like an identifier or an ERROR_MARK. */
382 return exp;
383
384 case 'd': /* A decl node */
385 #if 0 /* This is bogus. jason 9/21/94 */
386
387 t1 = break_out_calls (DECL_INITIAL (exp));
388 if (t1 != DECL_INITIAL (exp))
389 {
390 exp = copy_node (exp);
391 DECL_INITIAL (exp) = t1;
392 }
393 #endif
394 return exp;
395
396 case 'b': /* A block node */
397 {
398 /* Don't know how to handle these correctly yet. Must do a
399 break_out_calls on all DECL_INITIAL values for local variables,
400 and also break_out_calls on all sub-blocks and sub-statements. */
401 abort ();
402 }
403 return exp;
404
405 case 'e': /* an expression */
406 case 'r': /* a reference */
407 case 's': /* an expression with side effects */
408 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
409 {
410 t1 = break_out_calls (TREE_OPERAND (exp, i));
411 if (t1 != TREE_OPERAND (exp, i))
412 {
413 exp = copy_node (exp);
414 TREE_OPERAND (exp, i) = t1;
415 }
416 }
417 return exp;
418
419 case '<': /* a comparison expression */
420 case '2': /* a binary arithmetic expression */
421 t2 = break_out_calls (TREE_OPERAND (exp, 1));
422 if (t2 != TREE_OPERAND (exp, 1))
423 changed = 1;
424 case '1': /* a unary arithmetic expression */
425 t1 = break_out_calls (TREE_OPERAND (exp, 0));
426 if (t1 != TREE_OPERAND (exp, 0))
427 changed = 1;
428 if (changed)
429 {
430 if (tree_code_length[(int) code] == 1)
431 return build1 (code, TREE_TYPE (exp), t1);
432 else
433 return build (code, TREE_TYPE (exp), t1, t2);
434 }
435 return exp;
436 }
437
438 }
439 \f
440 extern struct obstack permanent_obstack;
441
442 /* Here is how primitive or already-canonicalized types' hash
443 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
444 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
445
446 /* Construct, lay out and return the type of methods belonging to class
447 BASETYPE and whose arguments are described by ARGTYPES and whose values
448 are described by RETTYPE. If each type exists already, reuse it. */
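
/* Editorial example: for a member function `int S::f (char)', BASETYPE
   is `S', RETTYPE is `int', and ARGTYPES lists `char'.  The METHOD_TYPE
   built below gets an argument list of (S *, char), with the implicit
   `this' pointer prepended as the first ("artificial") argument.  */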
449
450 tree
451 build_cplus_method_type (basetype, rettype, argtypes)
452 tree basetype, rettype, argtypes;
453 {
454 register tree t;
455 tree ptype;
456 int hashcode;
457
458 /* Make a node of the sort we want. */
459 t = make_node (METHOD_TYPE);
460
461 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
462 TREE_TYPE (t) = rettype;
463 ptype = build_pointer_type (basetype);
464
465 /* The actual arglist for this function includes a "hidden" argument
466 which is "this". Put it into the list of argument types. Make
467 sure that the new argument list is allocated on the same obstack
468 as the type. */
469 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
470 TYPE_ARG_TYPES (t) = argtypes;
471 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
472
473 /* If we already have such a type, use the old one and free this one.
474 Note that it also frees up the above cons cell if found. */
475 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) +
476 type_hash_list (argtypes);
477
478 t = type_hash_canon (hashcode, t);
479
480 if (TYPE_SIZE (t) == 0)
481 layout_type (t);
482
483 return t;
484 }
485
486 static tree
487 build_cplus_array_type_1 (elt_type, index_type)
488 tree elt_type;
489 tree index_type;
490 {
491 tree t;
492
493 if (elt_type == error_mark_node || index_type == error_mark_node)
494 return error_mark_node;
495
496 if (processing_template_decl
497 || uses_template_parms (elt_type)
498 || uses_template_parms (index_type))
499 {
500 t = make_node (ARRAY_TYPE);
501 TREE_TYPE (t) = elt_type;
502 TYPE_DOMAIN (t) = index_type;
503 }
504 else
505 t = build_array_type (elt_type, index_type);
506
507 /* Push these needs up so that initialization takes place
508 more easily. */
509 TYPE_NEEDS_CONSTRUCTING (t)
510 = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
511 TYPE_NEEDS_DESTRUCTOR (t)
512 = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
513 return t;
514 }
515
516 tree
517 build_cplus_array_type (elt_type, index_type)
518 tree elt_type;
519 tree index_type;
520 {
521 tree t;
522 int type_quals = CP_TYPE_QUALS (elt_type);
523
524 elt_type = TYPE_MAIN_VARIANT (elt_type);
525
526 t = build_cplus_array_type_1 (elt_type, index_type);
527
528 if (type_quals != TYPE_UNQUALIFIED)
529 t = cp_build_qualified_type (t, type_quals);
530
531 return t;
532 }
533 \f
534 /* Make a variant of TYPE, qualified with the TYPE_QUALS. Handles
535 arrays correctly. In particular, if TYPE is an array of T's, and
536 TYPE_QUALS is non-empty, returns an array of qualified T's. If
537    an attempt is made to qualify a type illegally, and COMPLAIN is
538 non-zero, an error is issued. If COMPLAIN is zero, error_mark_node
539 is returned. */
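
/* Editorial example: qualifying an array sinks the qualifiers into the
   element type, so for

     typedef int A[10];
     const A ca;

   the type of `ca' is an array of ten `const int', not a const array of
   `int'.  Likewise, `restrict' is rejected on non-pointer types and
   cv-qualifiers are rejected on function types, per the checks below.  */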
540
541 tree
542 cp_build_qualified_type_real (type, type_quals, complain)
543 tree type;
544 int type_quals;
545 int complain;
546 {
547 tree result;
548
549 if (type == error_mark_node)
550 return type;
551
552 if (type_quals == TYPE_QUALS (type))
553 return type;
554
555 /* A restrict-qualified pointer type must be a pointer (or reference)
556 to object or incomplete type. */
557 if ((type_quals & TYPE_QUAL_RESTRICT)
558 && TREE_CODE (type) != TEMPLATE_TYPE_PARM
559 && (!POINTER_TYPE_P (type)
560 || TYPE_PTRMEM_P (type)
561 || TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE))
562 {
563 if (complain)
564 cp_error ("`%T' cannot be `restrict'-qualified", type);
565 else
566 return error_mark_node;
567
568 type_quals &= ~TYPE_QUAL_RESTRICT;
569 }
570
571 if (type_quals != TYPE_UNQUALIFIED
572 && TREE_CODE (type) == FUNCTION_TYPE)
573 {
574 if (complain)
575 cp_error ("`%T' cannot be `const'-, `volatile'-, or `restrict'-qualified", type);
576 else
577 return error_mark_node;
578 type_quals = TYPE_UNQUALIFIED;
579 }
580 else if (TREE_CODE (type) == ARRAY_TYPE)
581 {
582 /* In C++, the qualification really applies to the array element
583 type. Obtain the appropriately qualified element type. */
584 tree t;
585 tree element_type
586 = cp_build_qualified_type_real (TREE_TYPE (type),
587 type_quals,
588 complain);
589
590 if (element_type == error_mark_node)
591 return error_mark_node;
592
593 /* See if we already have an identically qualified type. */
594 for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
595 if (CP_TYPE_QUALS (t) == type_quals)
596 break;
597
598 /* If we didn't already have it, create it now. */
599 if (!t)
600 {
601 /* Make a new array type, just like the old one, but with the
602 appropriately qualified element type. */
603 t = build_type_copy (type);
604 TREE_TYPE (t) = element_type;
605 }
606
607 /* Even if we already had this variant, we update
608 TYPE_NEEDS_CONSTRUCTING and TYPE_NEEDS_DESTRUCTOR in case
609 they changed since the variant was originally created.
610
611 This seems hokey; if there is some way to use a previous
612 variant *without* coming through here,
613 TYPE_NEEDS_CONSTRUCTING will never be updated. */
614 TYPE_NEEDS_CONSTRUCTING (t)
615 = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type));
616 TYPE_NEEDS_DESTRUCTOR (t)
617 = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type));
618 return t;
619 }
620 else if (TYPE_PTRMEMFUNC_P (type))
621 {
622 /* For a pointer-to-member type, we can't just return a
623 cv-qualified version of the RECORD_TYPE. If we do, we
624      haven't changed the field that contains the actual pointer to
625 a method, and so TYPE_PTRMEMFUNC_FN_TYPE will be wrong. */
626 tree t;
627
628 t = TYPE_PTRMEMFUNC_FN_TYPE (type);
629 t = cp_build_qualified_type_real (t, type_quals, complain);
630 return build_ptrmemfunc_type (t);
631 }
632
633 /* Retrieve (or create) the appropriately qualified variant. */
634 result = build_qualified_type (type, type_quals);
635
636 /* If this was a pointer-to-method type, and we just made a copy,
637 then we need to clear the cached associated
638 pointer-to-member-function type; it is not valid for the new
639 type. */
640 if (result != type
641 && TREE_CODE (type) == POINTER_TYPE
642 && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)
643 TYPE_SET_PTRMEMFUNC_TYPE (result, NULL_TREE);
644
645 return result;
646 }
647
648 /* Returns the canonical version of TYPE. In other words, if TYPE is
649 a typedef, returns the underlying type. The cv-qualification of
650 the type returned matches the type input; they will always be
651 compatible types. */
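
/* Editorial example: given `typedef int word;', canonical_type_variant
   applied to `const word' returns `const int' -- the underlying type,
   re-qualified with the same cv-qualifiers.  */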
652
653 tree
654 canonical_type_variant (t)
655 tree t;
656 {
657 return cp_build_qualified_type (TYPE_MAIN_VARIANT (t), CP_TYPE_QUALS (t));
658 }
659 \f
660 /* Add OFFSET to all base types of T.
661
662    OFFSET, which is a type offset, is a number of bytes.
663
664 Note that we don't have to worry about having two paths to the
665 same base type, since this type owns its association list. */
666
667 static void
668 propagate_binfo_offsets (binfo, offset)
669 tree binfo;
670 tree offset;
671 {
672 tree binfos = BINFO_BASETYPES (binfo);
673 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
674
675 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
676 {
677 tree base_binfo = TREE_VEC_ELT (binfos, i);
678
679 if (TREE_VIA_VIRTUAL (base_binfo))
680 i += 1;
681 else
682 {
683 int j;
684 tree delta = NULL_TREE;
685
686 for (j = i+1; j < n_baselinks; j++)
687 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
688 {
689 /* The next basetype offset must take into account the space
690 between the classes, not just the size of each class. */
691 delta = size_binop (MINUS_EXPR,
692 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
693 BINFO_OFFSET (base_binfo));
694 break;
695 }
696
697 #if 0
698 if (BINFO_OFFSET_ZEROP (base_binfo))
699 BINFO_OFFSET (base_binfo) = offset;
700 else
701 BINFO_OFFSET (base_binfo)
702 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
703 #else
704 BINFO_OFFSET (base_binfo) = offset;
705 #endif
706
707 propagate_binfo_offsets (base_binfo, offset);
708
709 /* Go to our next class that counts for offset propagation. */
710 i = j;
711 if (i < n_baselinks)
712 offset = size_binop (PLUS_EXPR, offset, delta);
713 }
714 }
715 }
716
717 /* Makes new binfos for the indirect bases under BINFO, and updates
718 BINFO_OFFSET for them and their bases. */
719
720 void
721 unshare_base_binfos (binfo)
722 tree binfo;
723 {
724 tree binfos = BINFO_BASETYPES (binfo);
725 tree new_binfo;
726 int j;
727
728 if (binfos == NULL_TREE)
729 return;
730
731 /* Now unshare the structure beneath BINFO. */
732 for (j = TREE_VEC_LENGTH (binfos)-1;
733 j >= 0; j--)
734 {
735 tree base_binfo = TREE_VEC_ELT (binfos, j);
736 new_binfo = TREE_VEC_ELT (binfos, j)
737 = make_binfo (BINFO_OFFSET (base_binfo),
738 base_binfo,
739 BINFO_VTABLE (base_binfo),
740 BINFO_VIRTUALS (base_binfo));
741 TREE_VIA_PUBLIC (new_binfo) = TREE_VIA_PUBLIC (base_binfo);
742 TREE_VIA_PROTECTED (new_binfo) = TREE_VIA_PROTECTED (base_binfo);
743 TREE_VIA_VIRTUAL (new_binfo) = TREE_VIA_VIRTUAL (base_binfo);
744 BINFO_INHERITANCE_CHAIN (new_binfo) = binfo;
745 unshare_base_binfos (new_binfo);
746 }
747 }
748
749 /* Finish the work of layout_record, now taking virtual bases into account.
750 Also compute the actual offsets that our base classes will have.
751 This must be performed after the fields are laid out, since virtual
752    baseclasses must be laid out at the end of the record.
753
754 Returns the maximum number of virtual functions any of the
755 baseclasses provide. */
756
757 int
758 layout_basetypes (rec, max)
759 tree rec;
760 int max;
761 {
762 tree binfos = TYPE_BINFO_BASETYPES (rec);
763 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
764
765 tree vbase_types;
766
767 unsigned int record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
768 unsigned int desired_align;
769
770 /* Record size so far is CONST_SIZE bits, where CONST_SIZE is an integer. */
771 register unsigned int const_size = 0;
772 unsigned int nonvirtual_const_size;
773
774 #ifdef STRUCTURE_SIZE_BOUNDARY
775 /* Packed structures don't need to have minimum size. */
776 if (! TYPE_PACKED (rec))
777 record_align = MAX (record_align, STRUCTURE_SIZE_BOUNDARY);
778 #endif
779
780 /* Get all the virtual base types that this type uses. The
781 TREE_VALUE slot holds the virtual baseclass type. Note that
782 get_vbase_types makes copies of the virtual base BINFOs, so that
783 the vbase_types are unshared. */
784 vbase_types = CLASSTYPE_VBASECLASSES (rec);
785
786 my_friendly_assert (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST, 19970302);
787 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
788
789 nonvirtual_const_size = const_size;
790
791 while (vbase_types)
792 {
793 tree basetype = BINFO_TYPE (vbase_types);
794 tree offset;
795
796 desired_align = TYPE_ALIGN (basetype);
797 record_align = MAX (record_align, desired_align);
798
799 if (const_size == 0)
800 offset = integer_zero_node;
801 else
802 {
803 /* Give each virtual base type the alignment it wants. */
804 const_size = CEIL (const_size, desired_align) * desired_align;
805 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
806 }
807
808 if (CLASSTYPE_VSIZE (basetype) > max)
809 max = CLASSTYPE_VSIZE (basetype);
810 BINFO_OFFSET (vbase_types) = offset;
811
812       /* Every virtual baseclass takes at least a UNIT, so that we can
813 	 take its address and get something different for each base.  */
814 const_size += MAX (BITS_PER_UNIT,
815 TREE_INT_CST_LOW (CLASSTYPE_SIZE (basetype)));
816
817 vbase_types = TREE_CHAIN (vbase_types);
818 }
819
820 if (const_size)
821 {
822 /* Because a virtual base might take a single byte above,
823 we have to re-adjust the total size to make sure it is
824 a multiple of the alignment. */
825 /* Give the whole object the alignment it wants. */
826 const_size = CEIL (const_size, record_align) * record_align;
827 }
828
829 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
830 here, as that is for this class, without any virtual base classes. */
831 TYPE_ALIGN (rec) = record_align;
832 if (const_size != nonvirtual_const_size)
833 {
834 TYPE_SIZE (rec) = size_int (const_size);
835 TYPE_SIZE_UNIT (rec) = size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (rec),
836 size_int (BITS_PER_UNIT));
837 }
838
839 /* Now propagate offset information throughout the lattice. */
840 for (i = 0; i < n_baseclasses; i++)
841 {
842 register tree base_binfo = TREE_VEC_ELT (binfos, i);
843 register tree basetype = BINFO_TYPE (base_binfo);
844 tree field = TYPE_FIELDS (rec);
845
846 if (TREE_VIA_VIRTUAL (base_binfo))
847 continue;
848
849 my_friendly_assert (TREE_TYPE (field) == basetype, 23897);
850
851 if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
852 cp_warning ("direct base `%T' inaccessible in `%T' due to ambiguity",
853 basetype, rec);
854
855 BINFO_OFFSET (base_binfo)
856 = size_int (CEIL (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field)),
857 BITS_PER_UNIT));
858 propagate_binfo_offsets (base_binfo, BINFO_OFFSET (base_binfo));
859 TYPE_FIELDS (rec) = TREE_CHAIN (field);
860 }
861
862 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
863 vbase_types = TREE_CHAIN (vbase_types))
864 {
865 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
866 unshare_base_binfos (vbase_types);
867 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
868
869 if (extra_warnings)
870 {
871 tree basetype = BINFO_TYPE (vbase_types);
872 if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
873 cp_warning ("virtual base `%T' inaccessible in `%T' due to ambiguity",
874 basetype, rec);
875 }
876 }
877
878 return max;
879 }
880
881 /* If the empty base field in DECL overlaps with a base of the same type in
882 NEWDECL, which is either another base field or the first data field of
883 the class, pad the base just before NEWDECL and return 1. Otherwise,
884 return 0. */
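
/* Editorial example: in

     struct E {};
     struct A : public E {};
     struct B : public E { A a; };

   the empty base `E' of `B' and the `E' base inside the member `a' would
   otherwise both land at offset zero; the field for B's empty base is
   given a non-zero DECL_SIZE below so the two subobjects end up at
   distinct addresses.  */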
885
886 static int
887 avoid_overlap (decl, newdecl)
888 tree decl, newdecl;
889 {
890 tree field;
891
892 if (newdecl == NULL_TREE
893 || ! types_overlap_p (TREE_TYPE (decl), TREE_TYPE (newdecl)))
894 return 0;
895
896 for (field = decl; TREE_CHAIN (field) && TREE_CHAIN (field) != newdecl;
897 field = TREE_CHAIN (field))
898 ;
899
900 DECL_SIZE (field) = integer_one_node;
901
902 return 1;
903 }
904
905 /* Returns a list of fields to stand in for the base class subobjects
906 of REC. These fields are later removed by layout_basetypes. */
907
908 tree
909 build_base_fields (rec)
910 tree rec;
911 {
912 /* Chain to hold all the new FIELD_DECLs which stand in for base class
913 subobjects. */
914 tree base_decls = NULL_TREE;
915 tree binfos = TYPE_BINFO_BASETYPES (rec);
916 int n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
917 tree decl, nextdecl;
918 int i, saw_empty = 0;
919 unsigned int base_align = 0;
920
921 for (i = 0; i < n_baseclasses; ++i)
922 {
923 register tree base_binfo = TREE_VEC_ELT (binfos, i);
924 register tree basetype = BINFO_TYPE (base_binfo);
925
926 if (TYPE_SIZE (basetype) == 0)
927 /* This error is now reported in xref_tag, thus giving better
928 location information. */
929 continue;
930
931 if (TREE_VIA_VIRTUAL (base_binfo))
932 continue;
933
934 decl = build_lang_decl (FIELD_DECL, NULL_TREE, basetype);
935 DECL_ARTIFICIAL (decl) = 1;
936 DECL_FIELD_CONTEXT (decl) = DECL_CLASS_CONTEXT (decl) = rec;
937 DECL_SIZE (decl) = CLASSTYPE_SIZE (basetype);
938 DECL_ALIGN (decl) = CLASSTYPE_ALIGN (basetype);
939 TREE_CHAIN (decl) = base_decls;
940 base_decls = decl;
941
942 if (! flag_new_abi)
943 {
944 /* Brain damage for backwards compatibility. For no good reason,
945 the old layout_basetypes made every base at least as large as
946 the alignment for the bases up to that point, gratuitously
947 wasting space. So we do the same thing here. */
948 base_align = MAX (base_align, DECL_ALIGN (decl));
949 DECL_SIZE (decl)
950 = size_int (MAX (TREE_INT_CST_LOW (DECL_SIZE (decl)),
951 (int) base_align));
952 }
953 else if (DECL_SIZE (decl) == integer_zero_node)
954 saw_empty = 1;
955 }
956
957 /* Reverse the list of fields so we allocate the bases in the proper
958 order. */
959 base_decls = nreverse (base_decls);
960
961 /* In the presence of empty base classes, we run the risk of allocating
962 two objects of the same class on top of one another. Avoid that. */
963 if (flag_new_abi && saw_empty)
964 for (decl = base_decls; decl; decl = TREE_CHAIN (decl))
965 {
966 if (DECL_SIZE (decl) == integer_zero_node)
967 {
968 /* First step through the following bases until we find
969 an overlap or a non-empty base. */
970 for (nextdecl = TREE_CHAIN (decl); nextdecl;
971 nextdecl = TREE_CHAIN (nextdecl))
972 {
973 if (avoid_overlap (decl, nextdecl)
974 || DECL_SIZE (nextdecl) != integer_zero_node)
975 goto nextbase;
976 }
977
978 /* If we're still looking, also check against the first
979 field. */
980 for (nextdecl = TYPE_FIELDS (rec);
981 nextdecl && TREE_CODE (nextdecl) != FIELD_DECL;
982 nextdecl = TREE_CHAIN (nextdecl))
983 /* keep looking */;
984 avoid_overlap (decl, nextdecl);
985 }
986 nextbase:;
987 }
988
989 return base_decls;
990 }
991
992 /* Returns list of virtual base class pointers in a FIELD_DECL chain. */
993
994 tree
995 build_vbase_pointer_fields (rec)
996 tree rec;
997 {
998 /* Chain to hold all the new FIELD_DECLs which point at virtual
999 base classes. */
1000 tree vbase_decls = NULL_TREE;
1001 tree binfos = TYPE_BINFO_BASETYPES (rec);
1002 int n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
1003 tree decl;
1004 int i;
1005
1006 /* Handle basetypes almost like fields, but record their
1007 offsets differently. */
1008
1009 for (i = 0; i < n_baseclasses; i++)
1010 {
1011 register tree base_binfo = TREE_VEC_ELT (binfos, i);
1012 register tree basetype = BINFO_TYPE (base_binfo);
1013
1014 if (TYPE_SIZE (basetype) == 0)
1015 /* This error is now reported in xref_tag, thus giving better
1016 location information. */
1017 continue;
1018
1019 /* All basetypes are recorded in the association list of the
1020 derived type. */
1021
1022 if (TREE_VIA_VIRTUAL (base_binfo))
1023 {
1024 int j;
1025 const char *name;
1026
1027 /* The offset for a virtual base class is only used in computing
1028 virtual function tables and for initializing virtual base
1029 pointers. It is built once `get_vbase_types' is called. */
1030
1031 /* If this basetype can come from another vbase pointer
1032 without an additional indirection, we will share
1033 that pointer. If an indirection is involved, we
1034 make our own pointer. */
1035 for (j = 0; j < n_baseclasses; j++)
1036 {
1037 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
1038 if (! TREE_VIA_VIRTUAL (other_base_binfo)
1039 && binfo_member (basetype,
1040 CLASSTYPE_VBASECLASSES (BINFO_TYPE
1041 (other_base_binfo))
1042 ))
1043 goto got_it;
1044 }
1045 FORMAT_VBASE_NAME (name, basetype);
1046 decl = build_lang_decl (FIELD_DECL, get_identifier (name),
1047 build_pointer_type (basetype));
1048 /* If you change any of the below, take a look at all the
1049 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
1050 them too. */
1051 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
1052 DECL_VIRTUAL_P (decl) = 1;
1053 DECL_ARTIFICIAL (decl) = 1;
1054 DECL_FIELD_CONTEXT (decl) = rec;
1055 DECL_CLASS_CONTEXT (decl) = rec;
1056 DECL_FCONTEXT (decl) = basetype;
1057 DECL_SAVED_INSNS (decl) = 0;
1058 DECL_FIELD_SIZE (decl) = 0;
1059 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
1060 TREE_CHAIN (decl) = vbase_decls;
1061 BINFO_VPTR_FIELD (base_binfo) = decl;
1062 vbase_decls = decl;
1063
1064 got_it:
1065 /* The space this decl occupies has already been accounted for. */
1066 ;
1067 }
1068 }
1069
1070 return vbase_decls;
1071 }
1072 \f
1073 /* Hashing of lists so that we don't make duplicates.
1074    The entry point is `hash_tree_cons'.  */
1075
1076 /* Each hash table slot is a bucket containing a chain
1077 of these structures. */
1078
1079 struct list_hash
1080 {
1081 struct list_hash *next; /* Next structure in the bucket. */
1082 int hashcode; /* Hash code of this list. */
1083 tree list; /* The list recorded here. */
1084 };
1085
1086 /* Now here is the hash table. When recording a list, it is added
1087 to the slot whose index is the hash code mod the table size.
1088 Note that the hash table is used for several kinds of lists.
1089 While all these live in the same table, they are completely independent,
1090 and the hash code is computed differently for each of these. */
1091
1092 #define TYPE_HASH_SIZE 59
1093 static struct list_hash *list_hash_table[TYPE_HASH_SIZE];
1094
1095 /* Compute a hash code for a list (chain of TREE_LIST nodes
1096 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
1097 TREE_COMMON slots), by adding the hash codes of the individual entries. */
1098
1099 static int
1100 list_hash (purpose, value, chain)
1101 tree purpose, value, chain;
1102 {
1103 register int hashcode = 0;
1104
1105 if (chain)
1106 hashcode += TYPE_HASH (chain);
1107
1108 if (value)
1109 hashcode += TYPE_HASH (value);
1110 else
1111 hashcode += 1007;
1112 if (purpose)
1113 hashcode += TYPE_HASH (purpose);
1114 else
1115 hashcode += 1009;
1116 return hashcode;
1117 }
1118
1119 /* Look in the list hash table for a list whose PURPOSE, VALUE, and CHAIN
1120    match and whose hash code is HASHCODE.  If one is found, return it.
     Otherwise return 0.  */
1121
1122 static tree
1123 list_hash_lookup (hashcode, purpose, value, chain)
1124 int hashcode;
1125 tree purpose, value, chain;
1126 {
1127 register struct list_hash *h;
1128
1129 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
1130 if (h->hashcode == hashcode
1131 && TREE_PURPOSE (h->list) == purpose
1132 && TREE_VALUE (h->list) == value
1133 && TREE_CHAIN (h->list) == chain)
1134 return h->list;
1135 return 0;
1136 }
1137
1138 /* Add an entry to the list-hash-table
1139    for a list LIST whose hash code is HASHCODE.  */
1140
1141 static void
1142 list_hash_add (hashcode, list)
1143 int hashcode;
1144 tree list;
1145 {
1146 register struct list_hash *h;
1147
1148 h = (struct list_hash *) obstack_alloc (&permanent_obstack, sizeof (struct list_hash));
1149 h->hashcode = hashcode;
1150 h->list = list;
1151 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
1152 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
1153 }
1154
1155 /* Given list components PURPOSE, VALUE, and CHAIN, return the canonical
1156 object for an identical list if one already exists. Otherwise, build a
1157 new one, and record it as the canonical object. */
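
/* Editorial sketch: because lists are hashed and reused, two calls with
   identical components yield pointer-identical results, e.g.

     tree l1 = hash_tree_cons (NULL_TREE, integer_type_node, NULL_TREE);
     tree l2 = hash_tree_cons (NULL_TREE, integer_type_node, NULL_TREE);

   here l1 == l2, so such lists can be compared with pointer equality.  */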
1158
1159 /* Set to 1 to debug without canonicalization. Never set by program. */
1160
1161 static int debug_no_list_hash = 0;
1162
1163 tree
1164 hash_tree_cons (purpose, value, chain)
1165 tree purpose, value, chain;
1166 {
1167 tree t;
1168 int hashcode = 0;
1169
1170 if (! debug_no_list_hash)
1171 {
1172 hashcode = list_hash (purpose, value, chain);
1173 t = list_hash_lookup (hashcode, purpose, value, chain);
1174 if (t)
1175 return t;
1176 }
1177
1178 t = tree_cons (purpose, value, chain);
1179
1180 /* If this is a new list, record it for later reuse. */
1181 if (! debug_no_list_hash)
1182 list_hash_add (hashcode, t);
1183
1184 return t;
1185 }
1186
1187 /* Constructor for hashed lists. */
1188
1189 tree
1190 hash_tree_chain (value, chain)
1191 tree value, chain;
1192 {
1193 return hash_tree_cons (NULL_TREE, value, chain);
1194 }
1195
1196 /* Similar, but used for concatenating two lists. */
1197
1198 tree
1199 hash_chainon (list1, list2)
1200 tree list1, list2;
1201 {
1202 if (list2 == 0)
1203 return list1;
1204 if (list1 == 0)
1205 return list2;
1206 if (TREE_CHAIN (list1) == NULL_TREE)
1207 return hash_tree_chain (TREE_VALUE (list1), list2);
1208 return hash_tree_chain (TREE_VALUE (list1),
1209 hash_chainon (TREE_CHAIN (list1), list2));
1210 }
1211 \f
1212 /* Build an association between TYPE and some parameters:
1213
1214 OFFSET is the offset added to `this' to convert it to a pointer
1215 of type `TYPE *'
1216
1217 BINFO is the base binfo to use, if we are deriving from one. This
1218 is necessary, as we want specialized parent binfos from base
1219 classes, so that the VTABLE_NAMEs of bases are for the most derived
1220 type, instead of the simple type.
1221
1222 VTABLE is the virtual function table with which to initialize
1223 sub-objects of type TYPE.
1224
1225 VIRTUALS are the virtual functions sitting in VTABLE. */
1226
1227 tree
1228 make_binfo (offset, binfo, vtable, virtuals)
1229 tree offset, binfo;
1230 tree vtable, virtuals;
1231 {
1232 tree new_binfo = make_tree_vec (7);
1233 tree type;
1234
1235 if (TREE_CODE (binfo) == TREE_VEC)
1236 type = BINFO_TYPE (binfo);
1237 else
1238 {
1239 type = binfo;
1240 binfo = CLASS_TYPE_P (type) ? TYPE_BINFO (binfo) : NULL_TREE;
1241 }
1242
1243 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1244 BINFO_OFFSET (new_binfo) = offset;
1245 BINFO_VTABLE (new_binfo) = vtable;
1246 BINFO_VIRTUALS (new_binfo) = virtuals;
1247 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1248
1249 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1250 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1251 return new_binfo;
1252 }
1253
1254 /* Return the binfo value for ELEM in TYPE. */
1255
1256 tree
1257 binfo_value (elem, type)
1258 tree elem;
1259 tree type;
1260 {
1261 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1262 compiler_error ("base class `%s' ambiguous in binfo_value",
1263 TYPE_NAME_STRING (elem));
1264 if (elem == type)
1265 return TYPE_BINFO (type);
1266 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1267 return type;
1268 return get_binfo (elem, type, 0);
1269 }
1270
1271 /* Return a reversed copy of the BINFO-chain given by PATH. (If the
1272 BINFO_INHERITANCE_CHAIN points from base classes to derived
1273 classes, it will instead point from derived classes to base
1274 classes.) Returns the first node in the reversed chain. */
1275
1276 tree
1277 reverse_path (path)
1278 tree path;
1279 {
1280 register tree prev = NULL_TREE, cur;
1281 for (cur = path; cur; cur = BINFO_INHERITANCE_CHAIN (cur))
1282 {
1283 tree r = copy_node (cur);
1284 BINFO_INHERITANCE_CHAIN (r) = prev;
1285 prev = r;
1286 }
1287 return prev;
1288 }
1289
1290 void
1291 debug_binfo (elem)
1292 tree elem;
1293 {
1294 unsigned HOST_WIDE_INT n;
1295 tree virtuals;
1296
1297 fprintf (stderr, "type \"%s\"; offset = %ld\n",
1298 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1299 (long) TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1300 fprintf (stderr, "vtable type:\n");
1301 debug_tree (BINFO_TYPE (elem));
1302 if (BINFO_VTABLE (elem))
1303 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1304 else
1305 fprintf (stderr, "no vtable decl yet\n");
1306 fprintf (stderr, "virtuals:\n");
1307 virtuals = BINFO_VIRTUALS (elem);
1308
1309 n = skip_rtti_stuff (&virtuals, BINFO_TYPE (elem));
1310
1311 while (virtuals)
1312 {
1313 tree fndecl = TREE_VALUE (virtuals);
1314 fprintf (stderr, "%s [%ld =? %ld]\n",
1315 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1316 (long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1317 ++n;
1318 virtuals = TREE_CHAIN (virtuals);
1319 }
1320 }
1321
1322 int
1323 count_functions (t)
1324 tree t;
1325 {
1326 int i;
1327 if (TREE_CODE (t) == FUNCTION_DECL)
1328 return 1;
1329 else if (TREE_CODE (t) == OVERLOAD)
1330 {
1331 for (i=0; t; t = OVL_CHAIN (t))
1332 i++;
1333 return i;
1334 }
1335
1336 my_friendly_abort (359);
1337 return 0;
1338 }
1339
1340 int
1341 is_overloaded_fn (x)
1342 tree x;
1343 {
1344 /* A baselink is also considered an overloaded function. */
1345 if (TREE_CODE (x) == OFFSET_REF)
1346 x = TREE_OPERAND (x, 1);
1347 if (BASELINK_P (x))
1348 x = TREE_VALUE (x);
1349 return (TREE_CODE (x) == FUNCTION_DECL
1350 || TREE_CODE (x) == TEMPLATE_ID_EXPR
1351 || DECL_FUNCTION_TEMPLATE_P (x)
1352 || TREE_CODE (x) == OVERLOAD);
1353 }
1354
1355 int
1356 really_overloaded_fn (x)
1357 tree x;
1358 {
1359 /* A baselink is also considered an overloaded function. */
1360 if (TREE_CODE (x) == OFFSET_REF)
1361 x = TREE_OPERAND (x, 1);
1362 if (BASELINK_P (x))
1363 x = TREE_VALUE (x);
1364 return (TREE_CODE (x) == OVERLOAD
1365 && (TREE_CHAIN (x) != NULL_TREE
1366 || DECL_FUNCTION_TEMPLATE_P (OVL_FUNCTION (x))));
1367 }
1368
1369 tree
1370 get_first_fn (from)
1371 tree from;
1372 {
1373 my_friendly_assert (is_overloaded_fn (from), 9);
1374 /* A baselink is also considered an overloaded function. */
1375 if (BASELINK_P (from))
1376 from = TREE_VALUE (from);
1377 return OVL_CURRENT (from);
1378 }
1379
1380 /* Returns nonzero if T is a ->* or .* expression that refers to a
1381 member function. */
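
/* Editorial example: with

     struct S { int f (); } s;
     int (S::*pmf) () = &S::f;

   the expression `s.*pmf' (before it is called) is represented as an
   OFFSET_REF whose second operand has pointer-to-member-function type,
   so bound_pmf_p returns non-zero for it.  */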
1382
1383 int
1384 bound_pmf_p (t)
1385 tree t;
1386 {
1387 return (TREE_CODE (t) == OFFSET_REF
1388 && TYPE_PTRMEMFUNC_P (TREE_TYPE (TREE_OPERAND (t, 1))));
1389 }
1390
1391 /* Return a new OVL node, concatenating it with the old one. */
1392
1393 tree
1394 ovl_cons (decl, chain)
1395 tree decl;
1396 tree chain;
1397 {
1398 tree result = make_node (OVERLOAD);
1399 TREE_TYPE (result) = unknown_type_node;
1400 OVL_FUNCTION (result) = decl;
1401 TREE_CHAIN (result) = chain;
1402
1403 return result;
1404 }
1405
1406 /* Build a new overloaded function. If this is the first one,
1407    just return it; otherwise, ovl_cons the _DECLs.  */
1408
1409 tree
1410 build_overload (decl, chain)
1411 tree decl;
1412 tree chain;
1413 {
1414 if (! chain && TREE_CODE (decl) != TEMPLATE_DECL)
1415 return decl;
1416 if (chain && TREE_CODE (chain) != OVERLOAD)
1417 chain = ovl_cons (chain, NULL_TREE);
1418 return ovl_cons (decl, chain);
1419 }
1420
1421 /* True if fn is in ovl. */
1422
1423 int
1424 ovl_member (fn, ovl)
1425 tree fn;
1426 tree ovl;
1427 {
1428 if (ovl == NULL_TREE)
1429 return 0;
1430 if (TREE_CODE (ovl) != OVERLOAD)
1431 return ovl == fn;
1432 for (; ovl; ovl = OVL_CHAIN (ovl))
1433 if (OVL_FUNCTION (ovl) == fn)
1434 return 1;
1435 return 0;
1436 }
1437
1438 int
1439 is_aggr_type_2 (t1, t2)
1440 tree t1, t2;
1441 {
1442 if (TREE_CODE (t1) != TREE_CODE (t2))
1443 return 0;
1444 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1445 }
1446
1447 /* Returns non-zero if CODE is the code for a statement. */
1448
1449 static int
1450 statement_code_p (code)
1451 enum tree_code code;
1452 {
1453 switch (code)
1454 {
1455 case EXPR_STMT:
1456 case COMPOUND_STMT:
1457 case DECL_STMT:
1458 case IF_STMT:
1459 case FOR_STMT:
1460 case WHILE_STMT:
1461 case DO_STMT:
1462 case RETURN_STMT:
1463 case BREAK_STMT:
1464 case CONTINUE_STMT:
1465 case SWITCH_STMT:
1466 case GOTO_STMT:
1467 case LABEL_STMT:
1468 case ASM_STMT:
1469 case SUBOBJECT:
1470 case CLEANUP_STMT:
1471 case START_CATCH_STMT:
1472 case CTOR_STMT:
1473 case SCOPE_STMT:
1474 case CTOR_INITIALIZER:
1475 case CASE_LABEL:
1476 case RETURN_INIT:
1477 case TRY_BLOCK:
1478 case HANDLER:
1479 return 1;
1480
1481 default:
1482 return 0;
1483 }
1484 }
1485 \f
1486 #define PRINT_RING_SIZE 4
1487
1488 const char *
1489 lang_printable_name (decl, v)
1490 tree decl;
1491 int v;
1492 {
1493 static tree decl_ring[PRINT_RING_SIZE];
1494 static char *print_ring[PRINT_RING_SIZE];
1495 static int ring_counter;
1496 int i;
1497
1498 /* Only cache functions. */
1499 if (v < 2
1500 || TREE_CODE (decl) != FUNCTION_DECL
1501 || DECL_LANG_SPECIFIC (decl) == 0)
1502 return lang_decl_name (decl, v);
1503
1504 /* See if this print name is lying around. */
1505 for (i = 0; i < PRINT_RING_SIZE; i++)
1506 if (decl_ring[i] == decl)
1507 /* yes, so return it. */
1508 return print_ring[i];
1509
1510 if (++ring_counter == PRINT_RING_SIZE)
1511 ring_counter = 0;
1512
1513 if (current_function_decl != NULL_TREE)
1514 {
1515 if (decl_ring[ring_counter] == current_function_decl)
1516 ring_counter += 1;
1517 if (ring_counter == PRINT_RING_SIZE)
1518 ring_counter = 0;
1519 if (decl_ring[ring_counter] == current_function_decl)
1520 my_friendly_abort (106);
1521 }
1522
1523 if (print_ring[ring_counter])
1524 free (print_ring[ring_counter]);
1525
1526 print_ring[ring_counter] = xstrdup (lang_decl_name (decl, v));
1527 decl_ring[ring_counter] = decl;
1528 return print_ring[ring_counter];
1529 }
1530 \f
1531 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
1532 listed in RAISES. */
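
/* Editorial example: for `void f () throw (int);' the type of `f' is a
   variant of `void ()' whose TYPE_RAISES_EXCEPTIONS lists `int'; this
   function finds an existing variant with a matching exception
   specification or builds a new one.  */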
1533
1534 tree
1535 build_exception_variant (type, raises)
1536 tree type;
1537 tree raises;
1538 {
1539 tree v = TYPE_MAIN_VARIANT (type);
1540 int type_quals = TYPE_QUALS (type);
1541
1542 for (; v; v = TYPE_NEXT_VARIANT (v))
1543 if (TYPE_QUALS (v) == type_quals
1544 && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (v), 1))
1545 return v;
1546
1547 /* Need to build a new variant. */
1548 v = build_type_copy (type);
1549 TYPE_RAISES_EXCEPTIONS (v) = raises;
1550 return v;
1551 }
1552
1553 /* Given a TEMPLATE_TEMPLATE_PARM node T, create a new one together with its
1554    lang_specific field and its corresponding TEMPLATE_DECL node.  */
1555
1556 tree
1557 copy_template_template_parm (t)
1558 tree t;
1559 {
1560 tree template = TYPE_NAME (t);
1561 tree t2;
1562
1563 t2 = make_aggr_type (TEMPLATE_TEMPLATE_PARM);
1564 template = copy_node (template);
1565 copy_lang_decl (template);
1566
1567 TREE_TYPE (template) = t2;
1568 TYPE_NAME (t2) = template;
1569 TYPE_STUB_DECL (t2) = template;
1570
1571 /* No need to copy these */
1572 TYPE_FIELDS (t2) = TYPE_FIELDS (t);
1573 TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t2)
1574 = TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t);
1575 return t2;
1576 }
1577
1578 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
1579 FUNC is called with the DATA and the address of each sub-tree. If
1580 FUNC returns a non-NULL value, the traversal is aborted, and the
1581 value returned by FUNC is returned. */
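
/* Editorial sketch of a walk_tree_fn callback (hypothetical, not part of
   this file):

     static tree
     find_call_expr (tp, walk_subtrees, data)
          tree *tp;
          int *walk_subtrees ATTRIBUTE_UNUSED;
          void *data ATTRIBUTE_UNUSED;
     {
       if (TREE_CODE (*tp) == CALL_EXPR)
         return *tp;        -- a non-NULL result stops the walk
       return NULL_TREE;    -- keep walking into subtrees
     }

   Then `walk_tree (&expr, find_call_expr, NULL)' returns the first
   CALL_EXPR found in EXPR, or NULL_TREE if there is none.  */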
1582
1583 tree
1584 walk_tree (tp, func, data)
1585 tree *tp;
1586 walk_tree_fn func;
1587 void *data;
1588 {
1589 enum tree_code code;
1590 int walk_subtrees;
1591 tree result;
1592
1593 #define WALK_SUBTREE(NODE) \
1594 do \
1595 { \
1596 result = walk_tree (&(NODE), func, data); \
1597 if (result) \
1598 return result; \
1599 } \
1600 while (0)
1601
1602 /* Skip empty subtrees. */
1603 if (!*tp)
1604 return NULL_TREE;
1605
1606 /* Call the function. */
1607 walk_subtrees = 1;
1608 result = (*func) (tp, &walk_subtrees, data);
1609
1610 /* If we found something, return it. */
1611 if (result)
1612 return result;
1613
1614 /* Even if we didn't, FUNC may have decided that there was nothing
1615 interesting below this point in the tree. */
1616 if (!walk_subtrees)
1617 return NULL_TREE;
1618
1619 code = TREE_CODE (*tp);
1620
1621 /* Handle common cases up front. */
1622 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1623 || TREE_CODE_CLASS (code) == 'r'
1624 || TREE_CODE_CLASS (code) == 's')
1625 {
1626 int i, len;
1627
1628 /* Walk over all the sub-trees of this operand. */
1629 len = first_rtl_op (code);
1630 /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
1631 But, we only want to walk once. */
1632 if (code == TARGET_EXPR
1633 && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
1634 --len;
1635 /* Go through the subtrees. We need to do this in forward order so
1636 that the scope of a FOR_EXPR is handled properly. */
1637 for (i = 0; i < len; ++i)
1638 WALK_SUBTREE (TREE_OPERAND (*tp, i));
1639
1640 /* For statements, we also walk the chain so that we cover the
1641 entire statement tree. */
1642 if (statement_code_p (code))
1643 {
1644 if (code == DECL_STMT
1645 && DECL_STMT_DECL (*tp)
1646 && TREE_CODE_CLASS (TREE_CODE (DECL_STMT_DECL (*tp))) == 'd')
1647 {
1648 /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
1649 into declarations that are just mentioned, rather than
1650 declared; they don't really belong to this part of the tree.
1651 And, we can see cycles: the initializer for a declaration can
1652 refer to the declaration itself. */
1653 WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
1654 WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
1655 }
1656
1657 WALK_SUBTREE (TREE_CHAIN (*tp));
1658 }
1659
1660 /* We didn't find what we were looking for. */
1661 return NULL_TREE;
1662 }
1663 else if (TREE_CODE_CLASS (code) == 'd')
1664 {
1665 WALK_SUBTREE (TREE_TYPE (*tp));
1666
1667 /* We didn't find what we were looking for. */
1668 return NULL_TREE;
1669 }
1670
1671 /* Not one of the easy cases. We must explicitly go through the
1672 children. */
1673 switch (code)
1674 {
1675 case ERROR_MARK:
1676 case IDENTIFIER_NODE:
1677 case INTEGER_CST:
1678 case REAL_CST:
1679 case STRING_CST:
1680 case DEFAULT_ARG:
1681 case TEMPLATE_TEMPLATE_PARM:
1682 case TEMPLATE_PARM_INDEX:
1683 case TEMPLATE_TYPE_PARM:
1684 case REAL_TYPE:
1685 case COMPLEX_TYPE:
1686 case VOID_TYPE:
1687 case BOOLEAN_TYPE:
1688 case TYPENAME_TYPE:
1689 case UNION_TYPE:
1690 case ENUMERAL_TYPE:
1691 case TYPEOF_TYPE:
1692 case BLOCK:
1693       /* None of these have subtrees other than those already walked
1694 above. */
1695 break;
1696
1697 case PTRMEM_CST:
1698 WALK_SUBTREE (TREE_TYPE (*tp));
1699 break;
1700
1701 case POINTER_TYPE:
1702 case REFERENCE_TYPE:
1703 WALK_SUBTREE (TREE_TYPE (*tp));
1704 break;
1705
1706 case TREE_LIST:
1707 WALK_SUBTREE (TREE_PURPOSE (*tp));
1708 WALK_SUBTREE (TREE_VALUE (*tp));
1709 WALK_SUBTREE (TREE_CHAIN (*tp));
1710 break;
1711
1712 case OVERLOAD:
1713 WALK_SUBTREE (OVL_FUNCTION (*tp));
1714 WALK_SUBTREE (OVL_CHAIN (*tp));
1715 break;
1716
1717 case TREE_VEC:
1718 {
1719 int len = TREE_VEC_LENGTH (*tp);
1720 while (len--)
1721 WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
1722 }
1723 break;
1724
1725 case COMPLEX_CST:
1726 WALK_SUBTREE (TREE_REALPART (*tp));
1727 WALK_SUBTREE (TREE_IMAGPART (*tp));
1728 break;
1729
1730 case CONSTRUCTOR:
1731 WALK_SUBTREE (CONSTRUCTOR_ELTS (*tp));
1732 break;
1733
1734 case METHOD_TYPE:
1735 WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
1736 /* Fall through. */
1737
1738 case FUNCTION_TYPE:
1739 WALK_SUBTREE (TREE_TYPE (*tp));
1740 WALK_SUBTREE (TYPE_ARG_TYPES (*tp));
1741 break;
1742
1743 case ARRAY_TYPE:
1744 WALK_SUBTREE (TREE_TYPE (*tp));
1745 WALK_SUBTREE (TYPE_DOMAIN (*tp));
1746 break;
1747
1748 case INTEGER_TYPE:
1749 WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
1750 WALK_SUBTREE (TYPE_MAX_VALUE (*tp));
1751 break;
1752
1753 case OFFSET_TYPE:
1754 WALK_SUBTREE (TREE_TYPE (*tp));
1755 WALK_SUBTREE (TYPE_OFFSET_BASETYPE (*tp));
1756 break;
1757
1758 case RECORD_TYPE:
1759 if (TYPE_PTRMEMFUNC_P (*tp))
1760 WALK_SUBTREE (TYPE_PTRMEMFUNC_FN_TYPE (*tp));
1761 break;
1762
1763 default:
1764 my_friendly_abort (19990803);
1765 }
1766
1767 /* We didn't find what we were looking for. */
1768 return NULL_TREE;
1769
1770 #undef WALK_SUBTREE
1771 }
1772
1773 /* Passed to walk_tree. Checks for the use of types with no linkage. */
1774
1775 static tree
1776 no_linkage_helper (tp, walk_subtrees, data)
1777 tree *tp;
1778 int *walk_subtrees ATTRIBUTE_UNUSED;
1779 void *data ATTRIBUTE_UNUSED;
1780 {
1781 tree t = *tp;
1782
1783 if (TYPE_P (t)
1784 && (IS_AGGR_TYPE (t) || TREE_CODE (t) == ENUMERAL_TYPE)
1785 && (decl_function_context (TYPE_MAIN_DECL (t))
1786 || ANON_AGGRNAME_P (TYPE_IDENTIFIER (t))))
1787 return t;
1788 return NULL_TREE;
1789 }
1790
1791 /* Check if the type T depends on a type with no linkage and if so, return
1792 it. */
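
/* Editorial example: a class declared inside a function has no linkage,
   so in

     void g ()
     {
       struct local { };
       extern local *p;
     }

   no_linkage_check on the type of `p' returns `local', letting the
   caller diagnose the use of a local type in a declaration that needs
   linkage.  */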
1793
1794 tree
1795 no_linkage_check (t)
1796 tree t;
1797 {
1798 /* There's no point in checking linkage on template functions; we
1799 can't know their complete types. */
1800 if (processing_template_decl)
1801 return NULL_TREE;
1802
1803 t = walk_tree (&t, no_linkage_helper, NULL);
1804 if (t != error_mark_node)
1805 return t;
1806 return NULL_TREE;
1807 }
1808
1809 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
1810
1811 tree
1812 copy_tree_r (tp, walk_subtrees, data)
1813 tree *tp;
1814 int *walk_subtrees;
1815 void *data ATTRIBUTE_UNUSED;
1816 {
1817 enum tree_code code = TREE_CODE (*tp);
1818
1819 /* We make copies of most nodes. */
1820 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
1821 || TREE_CODE_CLASS (code) == 'r'
1822 || TREE_CODE_CLASS (code) == 'c'
1823 || TREE_CODE_CLASS (code) == 's'
1824 || code == PARM_DECL
1825 || code == TREE_LIST
1826 || code == TREE_VEC
1827 || code == OVERLOAD)
1828 {
1829 /* Because the chain gets clobbered when we make a copy, we save it
1830 here. */
1831 tree chain = TREE_CHAIN (*tp);
1832
1833 /* Copy the node. */
1834 *tp = copy_node (*tp);
1835
1836 /* Now, restore the chain, if appropriate. That will cause
1837 walk_tree to walk into the chain as well. */
1838 if (code == PARM_DECL || code == TREE_LIST || code == OVERLOAD
1839 || statement_code_p (code))
1840 TREE_CHAIN (*tp) = chain;
1841
1842 /* For now, we don't update BLOCKs when we make copies. So, we
1843 have to nullify all scope-statements. */
1844 if (TREE_CODE (*tp) == SCOPE_STMT)
1845 SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
1846 }
1847 else if (code == TEMPLATE_TEMPLATE_PARM)
1848 /* These must be copied specially. */
1849 *tp = copy_template_template_parm (*tp);
1850 else if (TREE_CODE_CLASS (code) == 't')
1851 /* There's no need to copy types, or anything beneath them. */
1852 *walk_subtrees = 0;
1853
1854 return NULL_TREE;
1855 }
1856
1857 #ifdef GATHER_STATISTICS
1858 extern int depth_reached;
1859 #endif
1860
1861 void
1862 print_lang_statistics ()
1863 {
1864 print_search_statistics ();
1865 print_class_statistics ();
1866 #ifdef GATHER_STATISTICS
1867 fprintf (stderr, "maximum template instantiation depth reached: %d\n",
1868 depth_reached);
1869 #endif
1870 }
1871
1872 /* This is used by the `assert' macro. It is provided in libgcc.a,
1873 which `cc' doesn't know how to link. Note that the C++ front-end
1874 no longer actually uses the `assert' macro (instead, it calls
1875 my_friendly_assert). But all of the back-end files still need this. */
1876
1877 void
1878 __eprintf (string, expression, line, filename)
1879 const char *string;
1880 const char *expression;
1881 unsigned line;
1882 const char *filename;
1883 {
1884 fprintf (stderr, string, expression, line, filename);
1885 fflush (stderr);
1886 abort ();
1887 }
1888
1889 /* Return, as an INTEGER_CST node, the number of elements for TYPE
1890 (which is an ARRAY_TYPE). This counts only elements of the top
1891 array. */
1892
1893 tree
1894 array_type_nelts_top (type)
1895 tree type;
1896 {
1897 return fold (build (PLUS_EXPR, sizetype,
1898 array_type_nelts (type),
1899 integer_one_node));
1900 }
1901
1902 /* Return, as an INTEGER_CST node, the number of elements for TYPE
1903 (which is an ARRAY_TYPE). This one is a recursive count of all
1904 ARRAY_TYPEs that are clumped together. */
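
/* Editorial example: for `int a[3][4]', array_type_nelts_top gives 3
   (the outermost dimension only), while array_type_nelts_total gives
   3 * 4 = 12, the total number of `int' elements.  */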
1905
1906 tree
1907 array_type_nelts_total (type)
1908 tree type;
1909 {
1910 tree sz = array_type_nelts_top (type);
1911 type = TREE_TYPE (type);
1912 while (TREE_CODE (type) == ARRAY_TYPE)
1913 {
1914 tree n = array_type_nelts_top (type);
1915 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1916 type = TREE_TYPE (type);
1917 }
1918 return sz;
1919 }
1920
1921 /* Called from break_out_target_exprs via mapcar. */
1922
1923 static tree
1924 bot_manip (tp, walk_subtrees, data)
1925 tree *tp;
1926 int *walk_subtrees;
1927 void *data;
1928 {
1929 splay_tree target_remap = ((splay_tree) data);
1930 tree t = *tp;
1931
1932 if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
1933 {
1934 /* There can't be any TARGET_EXPRs below this point. */
1935 *walk_subtrees = 0;
1936 return NULL_TREE;
1937 }
1938 else if (TREE_CODE (t) == TARGET_EXPR)
1939 {
1940 tree u;
1941
1942 if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR)
1943 {
1944 mark_used (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 1), 0), 0));
1945 u = build_cplus_new
1946 (TREE_TYPE (t), break_out_target_exprs (TREE_OPERAND (t, 1)));
1947 }
1948 else
1949 {
1950 u = copy_node (t);
1951 TREE_OPERAND (u, 0) = build (VAR_DECL, TREE_TYPE (t));
1952 layout_decl (TREE_OPERAND (u, 0), 0);
1953 }
1954
1955 /* Map the old variable to the new one. */
1956 splay_tree_insert (target_remap,
1957 (splay_tree_key) TREE_OPERAND (t, 0),
1958 (splay_tree_value) TREE_OPERAND (u, 0));
1959
1960 /* Replace the old expression with the new version. */
1961 *tp = u;
1962 /* We don't have to go below this point; the recursive call to
1963 break_out_target_exprs will have handled anything below this
1964 point. */
1965 *walk_subtrees = 0;
1966 return NULL_TREE;
1967 }
1968 else if (TREE_CODE (t) == CALL_EXPR)
1969 mark_used (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
1970
1971 /* Make a copy of this node. */
1972 return copy_tree_r (tp, walk_subtrees, NULL);
1973 }
1974
1975 /* Replace all remapped VAR_DECLs in T with their new equivalents.
1976 DATA is really a splay-tree mapping old variables to new
1977 variables. */
1978
1979 static tree
1980 bot_replace (t, walk_subtrees, data)
1981 tree *t;
1982 int *walk_subtrees ATTRIBUTE_UNUSED;
1983 void *data;
1984 {
1985 splay_tree target_remap = ((splay_tree) data);
1986
1987 if (TREE_CODE (*t) == VAR_DECL)
1988 {
1989 splay_tree_node n = splay_tree_lookup (target_remap,
1990 (splay_tree_key) *t);
1991 if (n)
1992 *t = (tree) n->value;
1993 }
1994
1995 return NULL_TREE;
1996 }
1997
1998 /* When we parse a default argument expression, we may create
1999 temporary variables via TARGET_EXPRs. When we actually use the
2000 default-argument expression, we make a copy of the expression, but
2001 we must replace the temporaries with appropriate local versions. */
2002
2003 tree
2004 break_out_target_exprs (t)
2005 tree t;
2006 {
2007 static int target_remap_count;
2008 static splay_tree target_remap;
2009
2010 if (!target_remap_count++)
2011 target_remap = splay_tree_new (splay_tree_compare_pointers,
2012 /*splay_tree_delete_key_fn=*/NULL,
2013 /*splay_tree_delete_value_fn=*/NULL);
2014 walk_tree (&t, bot_manip, target_remap);
2015 walk_tree (&t, bot_replace, target_remap);
2016
2017 if (!--target_remap_count)
2018 {
2019 splay_tree_delete (target_remap);
2020 target_remap = NULL;
2021 }
2022
2023 return t;
2024 }
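
/* A sketch of typical use (the variable name DEFAULT_ARG is made up
   here): a caller that substitutes a stored default-argument
   expression into a call does roughly

       arg = break_out_target_exprs (default_arg);

   so that any TARGET_EXPR temporaries created when the default was
   parsed are replaced, via bot_manip and bot_replace above, by fresh
   temporaries local to this use.  */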
2025
2028
2029 /* Similar to `build_nt', except that we set TREE_COMPLEXITY to be the
2030 current line number. */
2031
2032 tree
2033 build_min_nt VPROTO((enum tree_code code, ...))
2034 {
2035 #ifndef ANSI_PROTOTYPES
2036 enum tree_code code;
2037 #endif
2038 va_list p;
2039 register tree t;
2040 register int length;
2041 register int i;
2042
2043 VA_START (p, code);
2044
2045 #ifndef ANSI_PROTOTYPES
2046 code = va_arg (p, enum tree_code);
2047 #endif
2048
2049 t = make_node (code);
2050 length = tree_code_length[(int) code];
2051 TREE_COMPLEXITY (t) = lineno;
2052
2053 for (i = 0; i < length; i++)
2054 {
2055 tree x = va_arg (p, tree);
2056 TREE_OPERAND (t, i) = x;
2057 }
2058
2059 va_end (p);
2060 return t;
2061 }
2062
2063 /* Similar to `build', except we set TREE_COMPLEXITY to the current
2064 line-number. */
2065
2066 tree
2067 build_min VPROTO((enum tree_code code, tree tt, ...))
2068 {
2069 #ifndef ANSI_PROTOTYPES
2070 enum tree_code code;
2071 tree tt;
2072 #endif
2073 va_list p;
2074 register tree t;
2075 register int length;
2076 register int i;
2077
2078 VA_START (p, tt);
2079
2080 #ifndef ANSI_PROTOTYPES
2081 code = va_arg (p, enum tree_code);
2082 tt = va_arg (p, tree);
2083 #endif
2084
2085 t = make_node (code);
2086 length = tree_code_length[(int) code];
2087 TREE_TYPE (t) = tt;
2088 TREE_COMPLEXITY (t) = lineno;
2089
2090 for (i = 0; i < length; i++)
2091 {
2092 tree x = va_arg (p, tree);
2093 TREE_OPERAND (t, i) = x;
2094 }
2095
2096 va_end (p);
2097 return t;
2098 }
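
/* A plausible use, purely for illustration (A, B and TYPE are made-up
   operands): code that must keep an expression in syntactic form can
   build

       t = build_min_nt (PLUS_EXPR, a, b);

   or, when the type is already known,

       t = build_min (PLUS_EXPR, type, a, b);

   In both cases TREE_COMPLEXITY records the current line number so
   later diagnostics can point at the original source line.  */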
2099
2100 tree
2101 get_type_decl (t)
2102 tree t;
2103 {
2104 if (TREE_CODE (t) == TYPE_DECL)
2105 return t;
2106 if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
2107 return TYPE_STUB_DECL (t);
2108
2109 my_friendly_abort (42);
2110
2111   /* Stop the compiler from complaining that control reaches the end of a non-void function.  */
2112 return 0;
2113 }
2114
2115 int
2116 can_free (obstack, t)
2117 struct obstack *obstack;
2118 tree t;
2119 {
2120 int size = 0;
2121
2122 if (TREE_CODE (t) == TREE_VEC)
2123 size = (TREE_VEC_LENGTH (t)-1) * sizeof (tree) + sizeof (struct tree_vec);
2124 else
2125 my_friendly_abort (42);
2126
2127 #define ROUND(x) (((x) + obstack_alignment_mask (obstack)) \
2128 & ~ obstack_alignment_mask (obstack))
2129 if ((char *)t + ROUND (size) == obstack_next_free (obstack))
2130 return 1;
2131 #undef ROUND
2132
2133 return 0;
2134 }
2135
2136 /* Return first vector element whose BINFO_TYPE is ELEM.
2137 Return 0 if ELEM is not in VEC. VEC may be NULL_TREE. */
2138
2139 tree
2140 vec_binfo_member (elem, vec)
2141 tree elem, vec;
2142 {
2143 int i;
2144
2145 if (vec)
2146 for (i = 0; i < TREE_VEC_LENGTH (vec); ++i)
2147 if (same_type_p (elem, BINFO_TYPE (TREE_VEC_ELT (vec, i))))
2148 return TREE_VEC_ELT (vec, i);
2149
2150 return NULL_TREE;
2151 }
2152
2153 /* Kludge around the fact that DECL_CONTEXT for virtual functions returns
2154 the wrong thing for decl_function_context. Hopefully the uses in the
2155 backend won't matter, since we don't need a static chain for local class
2156 methods. FIXME! */
2157
2158 tree
2159 hack_decl_function_context (decl)
2160 tree decl;
2161 {
2162 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_FUNCTION_MEMBER_P (decl))
2163 return decl_function_context (TYPE_MAIN_DECL (DECL_CLASS_CONTEXT (decl)));
2164 return decl_function_context (decl);
2165 }
2166
2167 /* Returns the namespace that contains DECL, whether directly or
2168 indirectly. */
2169
2170 tree
2171 decl_namespace_context (decl)
2172 tree decl;
2173 {
2174 while (1)
2175 {
2176 if (TREE_CODE (decl) == NAMESPACE_DECL)
2177 return decl;
2178 else if (TYPE_P (decl))
2179 decl = CP_DECL_CONTEXT (TYPE_MAIN_DECL (decl));
2180 else
2181 decl = CP_DECL_CONTEXT (decl);
2182 }
2183 }
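
/* For example: given a member function of `N::C', CP_DECL_CONTEXT of
   the function is the class `C'; since that is a type, the loop then
   moves to CP_DECL_CONTEXT of C's TYPE_MAIN_DECL, which is the
   NAMESPACE_DECL for `N', and that is what gets returned.  */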
2184
2185 /* Return truthvalue of whether T1 is the same tree structure as T2.
2186 Return 1 if they are the same.
2187 Return 0 if they are understandably different.
2188 Return -1 if either contains tree structure not understood by
2189 this function. */
2190
2191 int
2192 cp_tree_equal (t1, t2)
2193 tree t1, t2;
2194 {
2195 register enum tree_code code1, code2;
2196 int cmp;
2197
2198 if (t1 == t2)
2199 return 1;
2200 if (t1 == 0 || t2 == 0)
2201 return 0;
2202
2203 code1 = TREE_CODE (t1);
2204 code2 = TREE_CODE (t2);
2205
2206 if (code1 == NOP_EXPR || code1 == CONVERT_EXPR || code1 == NON_LVALUE_EXPR)
2207 {
2208 if (code2 == NOP_EXPR || code2 == CONVERT_EXPR || code2 == NON_LVALUE_EXPR)
2209 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2210 else
2211 return cp_tree_equal (TREE_OPERAND (t1, 0), t2);
2212 }
2213 else if (code2 == NOP_EXPR || code2 == CONVERT_EXPR
2214 || code2 == NON_LVALUE_EXPR)
2215 return cp_tree_equal (t1, TREE_OPERAND (t2, 0));
2216
2217 if (code1 != code2)
2218 return 0;
2219
2220 switch (code1)
2221 {
2222 case INTEGER_CST:
2223 return TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
2224 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2);
2225
2226 case REAL_CST:
2227 return REAL_VALUES_EQUAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
2228
2229 case STRING_CST:
2230 return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
2231 && !bcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
2232 TREE_STRING_LENGTH (t1));
2233
2234 case CONSTRUCTOR:
2235 /* We need to do this when determining whether or not two
2236 non-type pointer to member function template arguments
2237 are the same. */
2238 if (!(same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))
2239 /* The first operand is RTL. */
2240 && TREE_OPERAND (t1, 0) == TREE_OPERAND (t2, 0)))
2241 return 0;
2242 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2243
2244 case TREE_LIST:
2245 cmp = cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
2246 if (cmp <= 0)
2247 return cmp;
2248 cmp = cp_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2));
2249 if (cmp <= 0)
2250 return cmp;
2251 return cp_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2));
2252
2253 case SAVE_EXPR:
2254 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2255
2256 case CALL_EXPR:
2257 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2258 if (cmp <= 0)
2259 return cmp;
2260 return simple_cst_list_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2261
2262 case TARGET_EXPR:
2263 /* Special case: if either target is an unallocated VAR_DECL,
2264 it means that it's going to be unified with whatever the
2265 TARGET_EXPR is really supposed to initialize, so treat it
2266 as being equivalent to anything. */
2267 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
2268 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
2269 && DECL_RTL (TREE_OPERAND (t1, 0)) == 0)
2270 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
2271 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
2272 && DECL_RTL (TREE_OPERAND (t2, 0)) == 0))
2273 cmp = 1;
2274 else
2275 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2276 if (cmp <= 0)
2277 return cmp;
2278 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2279
2280 case WITH_CLEANUP_EXPR:
2281 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2282 if (cmp <= 0)
2283 return cmp;
2284       return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t2, 2));
2285
2286 case COMPONENT_REF:
2287 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
2288 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2289 return 0;
2290
2291 case VAR_DECL:
2292 case PARM_DECL:
2293 case CONST_DECL:
2294 case FUNCTION_DECL:
2295 return 0;
2296
2297 case TEMPLATE_PARM_INDEX:
2298 return TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2)
2299 && TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2);
2300
2301 case SIZEOF_EXPR:
2302 case ALIGNOF_EXPR:
2303 if (TREE_CODE (TREE_OPERAND (t1, 0)) != TREE_CODE (TREE_OPERAND (t2, 0)))
2304 return 0;
2305 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t1, 0))) == 't')
2306 return same_type_p (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2307 break;
2308
2309 case PTRMEM_CST:
2310       /* Two pointers-to-member are the same if they point to the same
2311 	 field or function in the same class.  */
2312 return (PTRMEM_CST_MEMBER (t1) == PTRMEM_CST_MEMBER (t2)
2313 && same_type_p (PTRMEM_CST_CLASS (t1), PTRMEM_CST_CLASS (t2)));
2314
2315 default:
2316 break;
2317 }
2318
2319 switch (TREE_CODE_CLASS (code1))
2320 {
2321 int i;
2322 case '1':
2323 case '2':
2324 case '<':
2325 case 'e':
2326 case 'r':
2327 case 's':
2328 cmp = 1;
2329 for (i=0; i<tree_code_length[(int) code1]; ++i)
2330 {
2331 cmp = cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
2332 if (cmp <= 0)
2333 return cmp;
2334 }
2335 return cmp;
2336 }
2337
2338 return -1;
2339 }
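
/* A note on using the tri-state result (X1 and X2 are placeholder
   operands): callers typically chain comparisons the same way the
   cases above do, e.g.

       cmp = cp_tree_equal (x1, x2);
       if (cmp <= 0)
         return cmp;

   because 0 means known-different, -1 means not understood, and only
   a positive value means the trees are known to be equal.  */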
2340
2341 /* Build a wrapper around some pointer PTR so we can use it as a tree. */
2342
2343 tree
2344 build_ptr_wrapper (ptr)
2345 void *ptr;
2346 {
2347 tree t = make_node (WRAPPER);
2348 WRAPPER_PTR (t) = ptr;
2349 return t;
2350 }
2351
2352 /* Same, but on the expression_obstack. */
2353
2354 tree
2355 build_expr_ptr_wrapper (ptr)
2356 void *ptr;
2357 {
2358 return build_ptr_wrapper (ptr);
2359 }
2360
2361 /* Build a wrapper around some integer I so we can use it as a tree. */
2362
2363 tree
2364 build_int_wrapper (i)
2365 int i;
2366 {
2367 tree t = make_node (WRAPPER);
2368 WRAPPER_INT (t) = i;
2369 return t;
2370 }
2371
2372 static tree
2373 build_srcloc (file, line)
2374 char *file;
2375 int line;
2376 {
2377 tree t;
2378
2379 t = make_node (SRCLOC);
2380 SRCLOC_FILE (t) = file;
2381 SRCLOC_LINE (t) = line;
2382
2383 return t;
2384 }
2385
2386 tree
2387 build_srcloc_here ()
2388 {
2389 return build_srcloc (input_filename, lineno);
2390 }
2391
2392 /* The type of ARG when used as an lvalue. */
2393
2394 tree
2395 lvalue_type (arg)
2396 tree arg;
2397 {
2398 tree type = TREE_TYPE (arg);
2399 if (TREE_CODE (arg) == OVERLOAD)
2400 type = unknown_type_node;
2401 return type;
2402 }
2403
2404 /* The type of ARG for printing error messages; denote lvalues with
2405 reference types. */
2406
2407 tree
2408 error_type (arg)
2409 tree arg;
2410 {
2411 tree type = TREE_TYPE (arg);
2412 if (TREE_CODE (type) == ARRAY_TYPE)
2413 ;
2414 else if (real_lvalue_p (arg))
2415 type = build_reference_type (lvalue_type (arg));
2416 else if (IS_AGGR_TYPE (type))
2417 type = lvalue_type (arg);
2418
2419 return type;
2420 }
2421
2422 /* Does FUNCTION use a variable-length argument list? */
2423
2424 int
2425 varargs_function_p (function)
2426 tree function;
2427 {
2428 tree parm = TYPE_ARG_TYPES (TREE_TYPE (function));
2429 for (; parm; parm = TREE_CHAIN (parm))
2430 if (TREE_VALUE (parm) == void_type_node)
2431 return 0;
2432 return 1;
2433 }
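
/* For example: for `int f (int, ...)' the TYPE_ARG_TYPES list has no
   terminating void_type_node entry, so the loop runs off the end and
   1 is returned; for `int f (int)' the void terminator is found and
   0 is returned.  */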
2434
2435 /* Returns 1 if DECL is a member of a class.  */
2436
2437 int
2438 member_p (decl)
2439 tree decl;
2440 {
2441 tree ctx = DECL_CONTEXT (decl);
2442 return (ctx && TREE_CODE_CLASS (TREE_CODE (ctx)) == 't');
2443 }
2444
2445 /* Create a placeholder for member access where we don't actually have an
2446 object that the access is against. */
2447
2448 tree
2449 build_dummy_object (type)
2450 tree type;
2451 {
2452 tree decl = build1 (NOP_EXPR, build_pointer_type (type), void_zero_node);
2453 return build_indirect_ref (decl, NULL_PTR);
2454 }
2455
2456 /* We've gotten a reference to a member of TYPE. Return *this if appropriate,
2457 or a dummy object otherwise. If BINFOP is non-0, it is filled with the
2458 binfo path from current_class_type to TYPE, or 0. */
2459
2460 tree
2461 maybe_dummy_object (type, binfop)
2462 tree type;
2463 tree *binfop;
2464 {
2465 tree decl, context;
2466
2467 if (current_class_type
2468 && get_base_distance (type, current_class_type, 0, binfop) != -1)
2469 context = current_class_type;
2470 else
2471 {
2472 /* Reference from a nested class member function. */
2473 context = type;
2474 if (binfop)
2475 *binfop = TYPE_BINFO (type);
2476 }
2477
2478 if (current_class_ref && context == current_class_type)
2479 decl = current_class_ref;
2480 else
2481 decl = build_dummy_object (context);
2482
2483 return decl;
2484 }
2485
2486 /* Returns 1 if OB is a placeholder object, or a pointer to one. */
2487
2488 int
2489 is_dummy_object (ob)
2490 tree ob;
2491 {
2492 if (TREE_CODE (ob) == INDIRECT_REF)
2493 ob = TREE_OPERAND (ob, 0);
2494 return (TREE_CODE (ob) == NOP_EXPR
2495 && TREE_OPERAND (ob, 0) == void_zero_node);
2496 }
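
/* Sketch of the shapes involved: build_dummy_object (type) yields an
   INDIRECT_REF whose operand is a NOP_EXPR (of pointer-to-TYPE type)
   wrapping void_zero_node, which is exactly what is_dummy_object
   tests for; maybe_dummy_object only falls back to such a placeholder
   when `*this' of a suitable type is not available.  */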
2497
2498 /* Returns 1 iff type T is a POD type, as defined in [basic.types]. */
2499
2500 int
2501 pod_type_p (t)
2502 tree t;
2503 {
2504 while (TREE_CODE (t) == ARRAY_TYPE)
2505 t = TREE_TYPE (t);
2506
2507 if (INTEGRAL_TYPE_P (t))
2508 return 1; /* integral, character or enumeral type */
2509 if (FLOAT_TYPE_P (t))
2510 return 1;
2511 if (TYPE_PTR_P (t))
2512 return 1; /* pointer to non-member */
2513 if (TYPE_PTRMEM_P (t))
2514 return 1; /* pointer to member object */
2515 if (TYPE_PTRMEMFUNC_P (t))
2516 return 1; /* pointer to member function */
2517
2518 if (! CLASS_TYPE_P (t))
2519 return 0; /* other non-class type (reference or function) */
2520 if (CLASSTYPE_NON_POD_P (t))
2521 return 0;
2522 return 1;
2523 }
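
/* For example (made-up types): `int', `double', `char *' and a plain
   aggregate such as

       struct P { int i; double d; };

   are accepted above, whereas reference and function types, and any
   class for which the front end has set CLASSTYPE_NON_POD_P (e.g. one
   with a user-declared constructor), are rejected.  */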
2524
2525 /* Return 1 if ATTR_NAME and ATTR_ARGS denote a valid C++-specific
2526    attribute for either declaration DECL or type TYPE, and 0 otherwise.
2527 Plugged into valid_lang_attribute. */
2528
2529 int
2530 cp_valid_lang_attribute (attr_name, attr_args, decl, type)
2531 tree attr_name;
2532 tree attr_args ATTRIBUTE_UNUSED;
2533 tree decl ATTRIBUTE_UNUSED;
2534 tree type ATTRIBUTE_UNUSED;
2535 {
2536 if (is_attribute_p ("com_interface", attr_name))
2537 {
2538 if (! flag_vtable_thunks)
2539 {
2540 error ("`com_interface' only supported with -fvtable-thunks");
2541 return 0;
2542 }
2543
2544 if (attr_args != NULL_TREE
2545 || decl != NULL_TREE
2546 || ! CLASS_TYPE_P (type)
2547 || type != TYPE_MAIN_VARIANT (type))
2548 {
2549 warning ("`com_interface' attribute can only be applied to class definitions");
2550 return 0;
2551 }
2552
2553 CLASSTYPE_COM_INTERFACE (type) = 1;
2554 return 1;
2555 }
2556 else if (is_attribute_p ("init_priority", attr_name))
2557 {
2558       tree initp_expr = (attr_args ? TREE_VALUE (attr_args) : NULL_TREE);
2559 int pri;
2560
2561 if (initp_expr)
2562 STRIP_NOPS (initp_expr);
2563
2564 if (!initp_expr || TREE_CODE (initp_expr) != INTEGER_CST)
2565 {
2566 error ("requested init_priority is not an integer constant");
2567 return 0;
2568 }
2569
2570 pri = TREE_INT_CST_LOW (initp_expr);
2571
2572 while (TREE_CODE (type) == ARRAY_TYPE)
2573 type = TREE_TYPE (type);
2574
2575 if (decl == NULL_TREE
2576 || TREE_CODE (decl) != VAR_DECL
2577 || ! TREE_STATIC (decl)
2578 || DECL_EXTERNAL (decl)
2579 || (TREE_CODE (type) != RECORD_TYPE
2580 && TREE_CODE (type) != UNION_TYPE)
2581 /* Static objects in functions are initialized the
2582 first time control passes through that
2583 function. This is not precise enough to pin down an
2584 init_priority value, so don't allow it. */
2585 || current_function_decl)
2586 {
2587 error ("can only use init_priority attribute on file-scope definitions of objects of class type");
2588 return 0;
2589 }
2590
2591 if (pri > MAX_INIT_PRIORITY || pri <= 0)
2592 {
2593 error ("requested init_priority is out of range");
2594 return 0;
2595 }
2596
2597 /* Check for init_priorities that are reserved for
2598 	 language and runtime support implementations.  */
2599 if (pri <= MAX_RESERVED_INIT_PRIORITY)
2600 {
2601 warning
2602 ("requested init_priority is reserved for internal use");
2603 }
2604
2605 DECL_INIT_PRIORITY (decl) = pri;
2606 return 1;
2607 }
2608
2609 return 0;
2610 }
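
/* A usage sketch for the second attribute above (the names Obj,
   cleanup and the priority 600 are made up):

       struct Obj { Obj (); ~Obj (); };
       Obj cleanup __attribute__ ((init_priority (600)));

   at file scope passes the checks above and records 600 in
   DECL_INIT_PRIORITY, assuming 600 lies above the reserved range
   ending at MAX_RESERVED_INIT_PRIORITY and does not exceed
   MAX_INIT_PRIORITY.  */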
2611
2612 /* Return a new PTRMEM_CST of the indicated TYPE. The MEMBER is the
2613 thing pointed to by the constant. */
2614
2615 tree
2616 make_ptrmem_cst (type, member)
2617 tree type;
2618 tree member;
2619 {
2620 tree ptrmem_cst = make_node (PTRMEM_CST);
2621   /* It would seem a great convenience if make_node would set
2622 TREE_CONSTANT for things of class `c', but it does not. */
2623 TREE_CONSTANT (ptrmem_cst) = 1;
2624 TREE_TYPE (ptrmem_cst) = type;
2625 PTRMEM_CST_MEMBER (ptrmem_cst) = member;
2626 return ptrmem_cst;
2627 }
2628
2629 /* Mark ARG (which is really a `struct list_hash **') for GC.  */
2630
2631 static void
2632 mark_list_hash (arg)
2633 void *arg;
2634 {
2635 struct list_hash *lh;
2636
2637 for (lh = * ((struct list_hash **) arg); lh; lh = lh->next)
2638 ggc_mark_tree (lh->list);
2639 }
2640
2641 /* Initialize tree.c. */
2642
2643 void
2644 init_tree ()
2645 {
2646 make_lang_type_fn = cp_make_lang_type;
2647 lang_unsave = cp_unsave;
2648 ggc_add_root (list_hash_table,
2649 sizeof (list_hash_table) / sizeof (struct list_hash *),
2650 sizeof (struct list_hash *),
2651 mark_list_hash);
2652 }
2653
2654 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2655 information indicating to what new SAVE_EXPR this one should be
2656 mapped, use that one. Otherwise, create a new node and enter it in
2657 ST. FN is the function into which the copy will be placed. */
2658
2659 void
2660 remap_save_expr (tp, st, fn, walk_subtrees)
2661 tree *tp;
2662 splay_tree st;
2663 tree fn;
2664 int *walk_subtrees;
2665 {
2666 splay_tree_node n;
2667
2668 /* See if we already encountered this SAVE_EXPR. */
2669 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2670
2671 /* If we didn't already remap this SAVE_EXPR, do so now. */
2672 if (!n)
2673 {
2674 tree t = copy_node (*tp);
2675
2676 /* The SAVE_EXPR is now part of the function into which we
2677 are inlining this body. */
2678 SAVE_EXPR_CONTEXT (t) = fn;
2679 /* And we haven't evaluated it yet. */
2680 SAVE_EXPR_RTL (t) = NULL_RTX;
2681 /* Remember this SAVE_EXPR. */
2682 n = splay_tree_insert (st,
2683 (splay_tree_key) *tp,
2684 (splay_tree_value) t);
2685 }
2686 else
2687 /* We've already walked into this SAVE_EXPR, so we needn't do it
2688 again. */
2689 *walk_subtrees = 0;
2690
2691 /* Replace this SAVE_EXPR with the copy. */
2692 *tp = (tree) n->value;
2693 }
2694
2695 /* Called via walk_tree.  If *TP points to a DECL_STMT for a local
2696    declaration or to a LABEL_STMT, copies it and enters the copy in the
2697    splay tree given by DATA (which is really a `splay_tree').  */
2698
2699 static tree
2700 mark_local_for_remap_r (tp, walk_subtrees, data)
2701 tree *tp;
2702 int *walk_subtrees ATTRIBUTE_UNUSED;
2703 void *data;
2704 {
2705 tree t = *tp;
2706 splay_tree st = (splay_tree) data;
2707
2708 if ((TREE_CODE (t) == DECL_STMT
2709 && nonstatic_local_decl_p (DECL_STMT_DECL (t)))
2710 || TREE_CODE (t) == LABEL_STMT)
2711 {
2712 tree decl;
2713 tree copy;
2714
2715 /* Figure out what's being declared. */
2716 decl = (TREE_CODE (t) == DECL_STMT
2717 ? DECL_STMT_DECL (t) : LABEL_STMT_LABEL (t));
2718
2719 /* Make a copy. */
2720 copy = copy_decl_for_inlining (decl,
2721 DECL_CONTEXT (decl),
2722 DECL_CONTEXT (decl));
2723
2724 /* Remember the copy. */
2725 splay_tree_insert (st,
2726 (splay_tree_key) decl,
2727 (splay_tree_value) copy);
2728 }
2729
2730 return NULL_TREE;
2731 }
2732
2733 /* Called via walk_tree when an expression is unsaved.  Using the
2734    splay tree given by DATA (which is really a `splay_tree'), remaps
2735    all local declarations to appropriate replacements.  */
2736
2737 static tree
2738 cp_unsave_r (tp, walk_subtrees, data)
2739 tree *tp;
2740 int *walk_subtrees;
2741 void *data;
2742 {
2743 splay_tree st = (splay_tree) data;
2744 splay_tree_node n;
2745
2746 /* Only a local declaration (variable or label). */
2747 if (nonstatic_local_decl_p (*tp))
2748 {
2749 /* Lookup the declaration. */
2750 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2751
2752 /* If it's there, remap it. */
2753 if (n)
2754 *tp = (tree) n->value;
2755 }
2756 else if (TREE_CODE (*tp) == SAVE_EXPR)
2757 remap_save_expr (tp, st, current_function_decl, walk_subtrees);
2758 else
2759 {
2760 copy_tree_r (tp, walk_subtrees, NULL);
2761
2762 /* Do whatever unsaving is required. */
2763 unsave_expr_1 (*tp);
2764 }
2765
2766 /* Keep iterating. */
2767 return NULL_TREE;
2768 }
2769
2770 /* Called by unsave_expr_now whenever an expression (*TP) needs to be
2771 unsaved. */
2772
2773 static void
2774 cp_unsave (tp)
2775 tree *tp;
2776 {
2777 splay_tree st;
2778
2779 /* Create a splay-tree to map old local variable declarations to new
2780 ones. */
2781 st = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2782
2783 /* Walk the tree once figuring out what needs to be remapped. */
2784 walk_tree (tp, mark_local_for_remap_r, st);
2785
2786 /* Walk the tree again, copying, remapping, and unsaving. */
2787 walk_tree (tp, cp_unsave_r, st);
2788
2789 /* Clean up. */
2790 splay_tree_delete (st);
2791 }
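
/* A sketch of the overall flow (STMT is a made-up expression): when
   unsave_expr_now reaches a C++ tree it calls this function through
   the lang_unsave hook installed in init_tree, effectively doing

       cp_unsave (&stmt);

   The first walk records copies of local variables and labels; the
   second walk copies the tree, substitutes those copies, and remaps
   SAVE_EXPRs, so each expansion of the expression gets independent
   locals.  */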