cp-tree.h (skip_rtti_stuff): Adjust prototype.
[gcc.git] / gcc / cp / tree.c
1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92-98, 1999 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
4
5 This file is part of GNU CC.
6
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
11
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "obstack.h"
25 #include "tree.h"
26 #include "cp-tree.h"
27 #include "flags.h"
28 #include "rtl.h"
29 #include "toplev.h"
30 #include "ggc.h"
31 #include "insn-config.h"
32 #include "integrate.h"
33
34 static tree bot_manip PROTO((tree *, int *, void *));
35 static tree bot_replace PROTO((tree *, int *, void *));
36 static tree build_cplus_array_type_1 PROTO((tree, tree));
37 static void list_hash_add PROTO((int, tree));
38 static int list_hash PROTO((tree, tree, tree));
39 static tree list_hash_lookup PROTO((int, tree, tree, tree));
40 static void propagate_binfo_offsets PROTO((tree, tree));
41 static cp_lvalue_kind lvalue_p_1 PROTO((tree, int));
42 static tree no_linkage_helper PROTO((tree *, int *, void *));
43 static tree build_srcloc PROTO((char *, int));
44 static void mark_list_hash PROTO ((void *));
45 static int statement_code_p PROTO((enum tree_code));
46 static tree mark_local_for_remap_r PROTO((tree *, int *, void *));
47 static tree cp_unsave_r PROTO ((tree *, int *, void *));
48 static void cp_unsave PROTO((tree *));
49 static tree build_target_expr PROTO((tree, tree));
50
51 #define CEIL(x,y) (((x) + (y) - 1) / (y))
52
53 /* If REF is an lvalue, returns the kind of lvalue that REF is.
54 Otherwise, returns clk_none. If TREAT_CLASS_RVALUES_AS_LVALUES is
55 non-zero, rvalues of class type are considered lvalues. */
56
static cp_lvalue_kind
lvalue_p_1 (ref, treat_class_rvalues_as_lvalues)
     tree ref;
     int treat_class_rvalues_as_lvalues;
{
  cp_lvalue_kind op1_lvalue_kind = clk_none;
  cp_lvalue_kind op2_lvalue_kind = clk_none;

  /* An expression of reference type is always an ordinary lvalue.  */
  if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
    return clk_ordinary;

  /* `this' is not an lvalue unless -fthis-is-variable is in effect.  */
  if (ref == current_class_ptr && flag_this_is_variable <= 0)
    return clk_none;

  switch (TREE_CODE (ref))
    {
      /* preincrements and predecrements are valid lvals, provided
	 what they refer to are valid lvals.  */
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case SAVE_EXPR:
    case UNSAVE_EXPR:
    case TRY_CATCH_EXPR:
    case WITH_CLEANUP_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case NOP_EXPR:
      /* These wrappers have the lvalue-ness of their operand.  */
      return lvalue_p_1 (TREE_OPERAND (ref, 0),
			 treat_class_rvalues_as_lvalues);

    case COMPONENT_REF:
      op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
				    treat_class_rvalues_as_lvalues);
      if (op1_lvalue_kind
	  /* The "field" can be a FUNCTION_DECL or an OVERLOAD in some
	     situations.  */
	  && TREE_CODE (TREE_OPERAND (ref, 1)) == FIELD_DECL
	  && DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1)))
	{
	  /* Clear the ordinary bit.  If this object was a class
	     rvalue we want to preserve that information.  */
	  op1_lvalue_kind &= ~clk_ordinary;
	  /* The lvalue is for a bitfield.  */
	  op1_lvalue_kind |= clk_bitfield;
	}
      return op1_lvalue_kind;

    case STRING_CST:
      return clk_ordinary;

    case VAR_DECL:
      /* A readonly, non-static variable declared within an aggregate
	 (an in-class initialized constant) is not an lvalue.  */
      if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
	  && DECL_LANG_SPECIFIC (ref)
	  && DECL_IN_AGGR_P (ref))
	return clk_none;
      /* Fall through.  */
    case INDIRECT_REF:
    case ARRAY_REF:
    case PARM_DECL:
    case RESULT_DECL:
      if (TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
	return clk_ordinary;
      break;

      /* A currently unresolved scope ref.  */
    case SCOPE_REF:
      my_friendly_abort (103);
    case OFFSET_REF:
      if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
	return clk_ordinary;
      /* Fall through.  */
    case MAX_EXPR:
    case MIN_EXPR:
      /* These are lvalues only if both operands are; see the join
	 logic after the switch.  */
      op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
				    treat_class_rvalues_as_lvalues);
      op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
				    treat_class_rvalues_as_lvalues);
      break;

    case COND_EXPR:
      /* A conditional is an lvalue only if both arms are.  */
      op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
				    treat_class_rvalues_as_lvalues);
      op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 2),
				    treat_class_rvalues_as_lvalues);
      break;

    case MODIFY_EXPR:
      /* In C++, assignment expressions are lvalues.  */
      return clk_ordinary;

    case COMPOUND_EXPR:
      /* A comma expression has the lvalue-ness of its second operand.  */
      return lvalue_p_1 (TREE_OPERAND (ref, 1),
			 treat_class_rvalues_as_lvalues);

    case TARGET_EXPR:
      return treat_class_rvalues_as_lvalues ? clk_class : clk_none;

    case CALL_EXPR:
    case VA_ARG_EXPR:
      /* A call is a class rvalue only if it returns an aggregate.  */
      return ((treat_class_rvalues_as_lvalues
	       && IS_AGGR_TYPE (TREE_TYPE (ref)))
	      ? clk_class : clk_none);

    case FUNCTION_DECL:
      /* All functions (except non-static-member functions) are
	 lvalues.  */
      return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref)
	      ? clk_none : clk_ordinary);

    default:
      break;
    }

  /* If one operand is not an lvalue at all, then this expression is
     not an lvalue.  */
  if (!op1_lvalue_kind || !op2_lvalue_kind)
    return clk_none;

  /* Otherwise, it's an lvalue, and it has all the odd properties
     contributed by either operand.  */
  op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind;
  /* It's not an ordinary lvalue if it involves either a bit-field or
     a class rvalue.  */
  if ((op1_lvalue_kind & ~clk_ordinary) != clk_none)
    op1_lvalue_kind &= ~clk_ordinary;
  return op1_lvalue_kind;
}
182
183 /* If REF is an lvalue, returns the kind of lvalue that REF is.
184 Otherwise, returns clk_none. Lvalues can be assigned, unless they
185 have TREE_READONLY, or unless they are FUNCTION_DECLs. Lvalues can
186 have their address taken, unless they have DECL_REGISTER. */
187
188 cp_lvalue_kind
189 real_lvalue_p (ref)
190 tree ref;
191 {
192 return lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/0);
193 }
194
195 /* This differs from real_lvalue_p in that class rvalues are
196 considered lvalues. */
197
198 int
199 lvalue_p (ref)
200 tree ref;
201 {
202 return
203 (lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/1) != clk_none);
204 }
205
206 /* Return nonzero if REF is an lvalue valid for this language;
207 otherwise, print an error message and return zero. */
208
209 int
210 lvalue_or_else (ref, string)
211 tree ref;
212 const char *string;
213 {
214 int win = lvalue_p (ref);
215 if (! win)
216 error ("non-lvalue in %s", string);
217 return win;
218 }
219
220 /* Build a TARGET_EXPR, initializing the DECL with the VALUE. */
221
222 static tree
223 build_target_expr (decl, value)
224 tree decl;
225 tree value;
226 {
227 tree t;
228
229 t = build (TARGET_EXPR, TREE_TYPE (decl), decl, value,
230 maybe_build_cleanup (decl), NULL_TREE);
231 /* We always set TREE_SIDE_EFFECTS so that expand_expr does not
232 ignore the TARGET_EXPR. If there really turn out to be no
233 side-effects, then the optimizer should be able to get rid of
234 whatever code is generated anyhow. */
235 TREE_SIDE_EFFECTS (t) = 1;
236
237 return t;
238 }
239
240 /* INIT is a CALL_EXPR which needs info about its target.
241 TYPE is the type that this initialization should appear to have.
242
243 Build an encapsulation of the initialization to perform
244 and return it so that it can be processed by language-independent
245 and language-specific expression expanders. */
246
tree
build_cplus_new (type, init)
     tree type;
     tree init;
{
  tree fn;
  tree slot;
  tree rval;

  /* Make sure that we're not trying to create an instance of an
     abstract class.  */
  abstract_virtuals_error (NULL_TREE, type);

  /* Only calls get the AGGR_INIT_EXPR treatment; anything else is
     just converted to TYPE.  */
  if (TREE_CODE (init) != CALL_EXPR && TREE_CODE (init) != AGGR_INIT_EXPR)
    return convert (type, init);

  /* Create the anonymous variable that will hold the object being
     initialized.  */
  slot = build (VAR_DECL, type);
  DECL_ARTIFICIAL (slot) = 1;
  DECL_CONTEXT (slot) = current_function_decl;
  layout_decl (slot, 0);

  /* We split the CALL_EXPR into its function and its arguments here.
     Then, in expand_expr, we put them back together.  The reason for
     this is that this expression might be a default argument
     expression.  In that case, we need a new temporary every time the
     expression is used.  That's what break_out_target_exprs does; it
     replaces every AGGR_INIT_EXPR with a copy that uses a fresh
     temporary slot.  Then, expand_expr builds up a call-expression
     using the new slot.  */
  fn = TREE_OPERAND (init, 0);
  rval = build (AGGR_INIT_EXPR, type, fn, TREE_OPERAND (init, 1), slot);
  TREE_SIDE_EFFECTS (rval) = 1;
  /* Record whether the initialization is performed via a constructor
     call, i.e. the callee is the address of a FUNCTION_DECL that is a
     constructor.  */
  AGGR_INIT_VIA_CTOR_P (rval)
    = (TREE_CODE (fn) == ADDR_EXPR
       && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
       && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0)));
  rval = build_target_expr (slot, rval);

  return rval;
}
287
/* Build a TARGET_EXPR using INIT to initialize a new temporary of the
   indicated TYPE.  */
290
291 tree
292 build_target_expr_with_type (init, type)
293 tree init;
294 tree type;
295 {
296 tree slot;
297 tree rval;
298
299 slot = build (VAR_DECL, type);
300 DECL_ARTIFICIAL (slot) = 1;
301 DECL_CONTEXT (slot) = current_function_decl;
302 layout_decl (slot, 0);
303 rval = build_target_expr (slot, init);
304
305 return rval;
306 }
307
308 /* Like build_target_expr_with_type, but use the type of INIT. */
309
310 tree
311 get_target_expr (init)
312 tree init;
313 {
314 return build_target_expr_with_type (init, TREE_TYPE (init));
315 }
316
317 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
318 these CALL_EXPRs with tree nodes that will perform the cleanups. */
319
tree
break_out_cleanups (exp)
     tree exp;
{
  tree tmp = exp;

  /* A bare call whose result type needs a destructor is wrapped
     directly.  */
  if (TREE_CODE (tmp) == CALL_EXPR
      && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
    return build_cplus_new (TREE_TYPE (tmp), tmp);

  /* Otherwise, look through a chain of conversions for a call whose
     result needs a cleanup, and wrap that call in place.  Only the
     first such call found is wrapped.  */
  while (TREE_CODE (tmp) == NOP_EXPR
	 || TREE_CODE (tmp) == CONVERT_EXPR
	 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
	  && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
	{
	  TREE_OPERAND (tmp, 0)
	    = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
			       TREE_OPERAND (tmp, 0));
	  break;
	}
      else
	tmp = TREE_OPERAND (tmp, 0);
    }
  /* The rewriting above was done in place, so the original EXP is
     still the root of the expression.  */
  return exp;
}
347
348 /* Recursively perform a preorder search EXP for CALL_EXPRs, making
349 copies where they are found. Returns a deep copy all nodes transitively
350 containing CALL_EXPRs. */
351
tree
break_out_calls (exp)
     tree exp;
{
  register tree t1, t2 = NULL_TREE;
  register enum tree_code code;
  register int changed = 0;
  register int i;

  if (exp == NULL_TREE)
    return exp;

  code = TREE_CODE (exp);

  /* A call itself is simply copied.  */
  if (code == CALL_EXPR)
    return copy_node (exp);

  /* Don't try and defeat a save_expr, as it should only be done once.  */
  if (code == SAVE_EXPR)
    return exp;

  switch (TREE_CODE_CLASS (code))
    {
    default:
      abort ();

    case 'c':  /* a constant */
    case 't':  /* a type node */
    case 'x':  /* something random, like an identifier or an ERROR_MARK.  */
      return exp;

    case 'd':  /* A decl node */
#if 0               /* This is bogus.  jason 9/21/94 */

      t1 = break_out_calls (DECL_INITIAL (exp));
      if (t1 != DECL_INITIAL (exp))
	{
	  exp = copy_node (exp);
	  DECL_INITIAL (exp) = t1;
	}
#endif
      return exp;

    case 'b':  /* A block node */
      {
	/* Don't know how to handle these correctly yet.   Must do a
	   break_out_calls on all DECL_INITIAL values for local variables,
	   and also break_out_calls on all sub-blocks and sub-statements.  */
	abort ();
      }
      return exp;

    case 'e':  /* an expression */
    case 'r':  /* a reference */
    case 's':  /* an expression with side effects */
      /* Recurse on each operand; copy the node lazily, only when the
	 first changed operand is seen.  */
      for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
	{
	  t1 = break_out_calls (TREE_OPERAND (exp, i));
	  if (t1 != TREE_OPERAND (exp, i))
	    {
	      exp = copy_node (exp);
	      TREE_OPERAND (exp, i) = t1;
	    }
	}
      return exp;

    case '<':  /* a comparison expression */
    case '2':  /* a binary arithmetic expression */
      t2 = break_out_calls (TREE_OPERAND (exp, 1));
      if (t2 != TREE_OPERAND (exp, 1))
	changed = 1;
      /* Fall through: binary nodes share the operand-0 handling with
	 unary nodes.  */
    case '1':  /* a unary arithmetic expression */
      t1 = break_out_calls (TREE_OPERAND (exp, 0));
      if (t1 != TREE_OPERAND (exp, 0))
	changed = 1;
      if (changed)
	{
	  if (tree_code_length[(int) code] == 1)
	    return build1 (code, TREE_TYPE (exp), t1);
	  else
	    return build (code, TREE_TYPE (exp), t1, t2);
	}
      return exp;
    }

}
438 \f
439 extern struct obstack permanent_obstack;
440
441 /* Here is how primitive or already-canonicalized types' hash
442 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
443 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
444
445 /* Construct, lay out and return the type of methods belonging to class
446 BASETYPE and whose arguments are described by ARGTYPES and whose values
447 are described by RETTYPE. If each type exists already, reuse it. */
448
tree
build_cplus_method_type (basetype, rettype, argtypes)
     tree basetype, rettype, argtypes;
{
  register tree t;
  tree ptype;
  int hashcode;

  /* Make a node of the sort we want.  */
  t = make_node (METHOD_TYPE);

  TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
  TREE_TYPE (t) = rettype;
  ptype = build_pointer_type (basetype);

  /* The actual arglist for this function includes a "hidden" argument
     which is "this".  Put it into the list of argument types.  Make
     sure that the new argument list is allocated on the same obstack
     as the type.  */
  argtypes = tree_cons (NULL_TREE, ptype, argtypes);
  TYPE_ARG_TYPES (t) = argtypes;
  TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial".  */

  /* If we already have such a type, use the old one and free this one.
     Note that it also frees up the above cons cell if found.  */
  hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) +
    type_hash_list (argtypes);

  /* type_hash_canon returns the canonical node, which may or may not
     be the one just built.  */
  t = type_hash_canon (hashcode, t);

  /* Lay the type out unless the canonical node was already laid out.  */
  if (TYPE_SIZE (t) == 0)
    layout_type (t);

  return t;
}
484
static tree
build_cplus_array_type_1 (elt_type, index_type)
     tree elt_type;
     tree index_type;
{
  tree t;

  /* Propagate errors from either input.  */
  if (elt_type == error_mark_node || index_type == error_mark_node)
    return error_mark_node;

  /* For template-dependent types, build a bare ARRAY_TYPE node by
     hand rather than going through build_array_type.  */
  if (processing_template_decl
      || uses_template_parms (elt_type)
      || uses_template_parms (index_type))
    {
      t = make_node (ARRAY_TYPE);
      TREE_TYPE (t) = elt_type;
      TYPE_DOMAIN (t) = index_type;
    }
  else
    t = build_array_type (elt_type, index_type);

  /* Push these needs up so that initialization takes place
     more easily.  */
  TYPE_NEEDS_CONSTRUCTING (t)
    = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
  TYPE_NEEDS_DESTRUCTOR (t)
    = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
  return t;
}
514
515 tree
516 build_cplus_array_type (elt_type, index_type)
517 tree elt_type;
518 tree index_type;
519 {
520 tree t;
521 int type_quals = CP_TYPE_QUALS (elt_type);
522
523 elt_type = TYPE_MAIN_VARIANT (elt_type);
524
525 t = build_cplus_array_type_1 (elt_type, index_type);
526
527 if (type_quals != TYPE_UNQUALIFIED)
528 t = cp_build_qualified_type (t, type_quals);
529
530 return t;
531 }
532 \f
/* Make a variant of TYPE, qualified with the TYPE_QUALS.  Handles
   arrays correctly.  In particular, if TYPE is an array of T's, and
   TYPE_QUALS is non-empty, returns an array of qualified T's.  If
   an attempt is made to qualify a type illegally, and COMPLAIN is
   non-zero, an error is issued.  If COMPLAIN is zero, error_mark_node
   is returned.  */
539
tree
cp_build_qualified_type_real (type, type_quals, complain)
     tree type;
     int type_quals;
     int complain;
{
  tree result;

  if (type == error_mark_node)
    return type;

  /* Nothing to do if the qualification is already what was asked for.  */
  if (type_quals == TYPE_QUALS (type))
    return type;

  /* A restrict-qualified pointer type must be a pointer (or reference)
     to object or incomplete type.  */
  if ((type_quals & TYPE_QUAL_RESTRICT)
      && TREE_CODE (type) != TEMPLATE_TYPE_PARM
      && (!POINTER_TYPE_P (type)
	  || TYPE_PTRMEM_P (type)
	  || TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE))
    {
      if (complain)
	cp_error ("`%T' cannot be `restrict'-qualified", type);
      else
	return error_mark_node;

      /* When complaining, drop the invalid qualifier and continue.  */
      type_quals &= ~TYPE_QUAL_RESTRICT;
    }

  /* Function types cannot be cv-qualified.  */
  if (type_quals != TYPE_UNQUALIFIED
      && TREE_CODE (type) == FUNCTION_TYPE)
    {
      if (complain)
	cp_error ("`%T' cannot be `const'-, `volatile'-, or `restrict'-qualified", type);
      else
	return error_mark_node;
      type_quals = TYPE_UNQUALIFIED;
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      /* In C++, the qualification really applies to the array element
	 type.  Obtain the appropriately qualified element type.  */
      tree t;
      tree element_type
	= cp_build_qualified_type_real (TREE_TYPE (type),
					type_quals,
					complain);

      if (element_type == error_mark_node)
	return error_mark_node;

      /* See if we already have an identically qualified type.  */
      for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	if (CP_TYPE_QUALS (t) == type_quals)
	  break;

      /* If we didn't already have it, create it now.  */
      if (!t)
	{
	  /* Make a new array type, just like the old one, but with the
	     appropriately qualified element type.  */
	  t = build_type_copy (type);
	  TREE_TYPE (t) = element_type;
	}

      /* Even if we already had this variant, we update
	 TYPE_NEEDS_CONSTRUCTING and TYPE_NEEDS_DESTRUCTOR in case
	 they changed since the variant was originally created.

	 This seems hokey; if there is some way to use a previous
	 variant *without* coming through here,
	 TYPE_NEEDS_CONSTRUCTING will never be updated.  */
      TYPE_NEEDS_CONSTRUCTING (t)
	= TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type));
      TYPE_NEEDS_DESTRUCTOR (t)
	= TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type));
      return t;
    }
  else if (TYPE_PTRMEMFUNC_P (type))
    {
      /* For a pointer-to-member type, we can't just return a
	 cv-qualified version of the RECORD_TYPE.  If we do, we
	 haven't changed the field that contains the actual pointer to
	 a method, and so TYPE_PTRMEMFUNC_FN_TYPE will be wrong.  */
      tree t;

      t = TYPE_PTRMEMFUNC_FN_TYPE (type);
      t = cp_build_qualified_type_real (t, type_quals, complain);
      return build_ptrmemfunc_type (t);
    }

  /* Retrieve (or create) the appropriately qualified variant.  */
  result = build_qualified_type (type, type_quals);

  /* If this was a pointer-to-method type, and we just made a copy,
     then we need to clear the cached associated
     pointer-to-member-function type; it is not valid for the new
     type.  */
  if (result != type
      && TREE_CODE (type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)
    TYPE_SET_PTRMEMFUNC_TYPE (result, NULL_TREE);

  return result;
}
646
647 /* Returns the canonical version of TYPE. In other words, if TYPE is
648 a typedef, returns the underlying type. The cv-qualification of
649 the type returned matches the type input; they will always be
650 compatible types. */
651
652 tree
653 canonical_type_variant (t)
654 tree t;
655 {
656 return cp_build_qualified_type (TYPE_MAIN_VARIANT (t), CP_TYPE_QUALS (t));
657 }
658 \f
659 /* Add OFFSET to all base types of T.
660
661 OFFSET, which is a type offset, is number of bytes.
662
663 Note that we don't have to worry about having two paths to the
664 same base type, since this type owns its association list. */
665
static void
propagate_binfo_offsets (binfo, offset)
     tree binfo;
     tree offset;
{
  tree binfos = BINFO_BASETYPES (binfo);
  int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;

  if (flag_new_abi)
    {
      /* New ABI: simply shift every non-virtual base (recursively)
	 by OFFSET.  */
      for (i = 0; i < n_baselinks; ++i)
	{
	  tree base_binfo;

	  /* Figure out which base we're looking at.  */
	  base_binfo = TREE_VEC_ELT (binfos, i);

	  /* Skip virtual bases.  Their BINFO_OFFSET doesn't matter
	     since they are always reached by using offsets looked up
	     at run-time.  */
	  if (TREE_VIA_VIRTUAL (base_binfo))
	    continue;

	  /* Whatever offset this class used to have in its immediate
	     derived class, it is now at OFFSET more bytes in its
	     final derived class, since the immediate derived class is
	     already at the indicated OFFSET.  */
	  BINFO_OFFSET (base_binfo)
	    = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);

	  propagate_binfo_offsets (base_binfo, offset);
	}
    }
  else
    {
      /* This algorithm, used for the old ABI, is neither simple, nor
	 general.  For example, it mishandles the case of:

	   struct A;
	   struct B : public A;
	   struct C : public B;

	 if B is at offset zero in C, but A is not in offset zero in
	 B.  In that case, it sets the BINFO_OFFSET for A to zero.
	 (This situation arises in the new ABI if B has virtual
	 functions, but A does not.)  Rather than change this
	 algorithm, and risking breaking the old ABI, it is preserved
	 here.  */
      for (i = 0; i < n_baselinks; /* note increment is done in the
				      loop.  */)
	{
	  tree base_binfo = TREE_VEC_ELT (binfos, i);

	  if (TREE_VIA_VIRTUAL (base_binfo))
	    i += 1;
	  else
	    {
	      int j;
	      tree delta = NULL_TREE;

	      /* Look ahead for the next non-virtual base, to compute
		 the gap between it and this one.  */
	      for (j = i+1; j < n_baselinks; j++)
		if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
		  {
		    /* The next basetype offset must take into account
		       the space between the classes, not just the
		       size of each class.  */
		    delta = size_binop (MINUS_EXPR,
					BINFO_OFFSET (TREE_VEC_ELT (binfos,
								    j)),
					BINFO_OFFSET (base_binfo));
		    break;
		  }

	      BINFO_OFFSET (base_binfo) = offset;

	      propagate_binfo_offsets (base_binfo, offset);

	      /* Go to our next class that counts for offset
		 propagation.  */
	      i = j;
	      if (i < n_baselinks)
		offset = size_binop (PLUS_EXPR, offset, delta);
	    }
	}
    }
}
752
753 /* Makes new binfos for the indirect bases under BINFO, and updates
754 BINFO_OFFSET for them and their bases. */
755
void
unshare_base_binfos (binfo)
     tree binfo;
{
  tree binfos = BINFO_BASETYPES (binfo);
  tree new_binfo;
  int j;

  /* Nothing to do for a class with no bases.  */
  if (binfos == NULL_TREE)
    return;

  /* Now unshare the structure beneath BINFO.  */
  for (j = TREE_VEC_LENGTH (binfos)-1;
       j >= 0; j--)
    {
      tree base_binfo = TREE_VEC_ELT (binfos, j);
      /* Replace the shared binfo with a fresh copy carrying the same
	 offset, vtable, and virtuals.  */
      new_binfo = TREE_VEC_ELT (binfos, j)
	= make_binfo (BINFO_OFFSET (base_binfo),
		      base_binfo,
		      BINFO_VTABLE (base_binfo),
		      BINFO_VIRTUALS (base_binfo));
      /* Copy over the access and virtualness flags, and point the new
	 binfo back at its derived class.  */
      TREE_VIA_PUBLIC (new_binfo) = TREE_VIA_PUBLIC (base_binfo);
      TREE_VIA_PROTECTED (new_binfo) = TREE_VIA_PROTECTED (base_binfo);
      TREE_VIA_VIRTUAL (new_binfo) = TREE_VIA_VIRTUAL (base_binfo);
      BINFO_INHERITANCE_CHAIN (new_binfo) = binfo;
      /* Recurse so the whole lattice beneath this base is unshared too.  */
      unshare_base_binfos (new_binfo);
    }
}
784
785 /* Finish the work of layout_record, now taking virtual bases into account.
786 Also compute the actual offsets that our base classes will have.
787 This must be performed after the fields are laid out, since virtual
788 baseclasses must lay down at the end of the record.
789
790 Returns the maximum number of virtual functions any of the
791 baseclasses provide. */
792
int
layout_basetypes (rec, max)
     tree rec;
     int max;
{
  tree binfos = TYPE_BINFO_BASETYPES (rec);
  int i, n_baseclasses = CLASSTYPE_N_BASECLASSES (rec);
  tree vbase_types;
  tree *field;

  unsigned int record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
  unsigned int desired_align;

  /* Record size so far is CONST_SIZE bits, where CONST_SIZE is an integer.  */
  register unsigned int const_size = 0;
  unsigned int nonvirtual_const_size;

#ifdef STRUCTURE_SIZE_BOUNDARY
  /* Packed structures don't need to have minimum size.  */
  if (! TYPE_PACKED (rec))
    record_align = MAX (record_align, STRUCTURE_SIZE_BOUNDARY);
#endif

  /* Get all the virtual base types that this type uses.  The
     TREE_VALUE slot holds the virtual baseclass type.  Note that
     get_vbase_types makes copies of the virtual base BINFOs, so that
     the vbase_types are unshared.  */
  vbase_types = CLASSTYPE_VBASECLASSES (rec);

  my_friendly_assert (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST, 19970302);
  const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));

  nonvirtual_const_size = const_size;

  /* Lay each virtual base down at the end of the record, aligned as
     its type requires.  */
  while (vbase_types)
    {
      tree basetype = BINFO_TYPE (vbase_types);
      tree offset;

      desired_align = TYPE_ALIGN (basetype);
      record_align = MAX (record_align, desired_align);

      if (const_size == 0)
	offset = integer_zero_node;
      else
	{
	  /* Give each virtual base type the alignment it wants.  */
	  const_size = CEIL (const_size, desired_align) * desired_align;
	  offset = size_int (CEIL (const_size, BITS_PER_UNIT));
	}

      /* Track the largest vtable any base needs.  */
      if (CLASSTYPE_VSIZE (basetype) > max)
	max = CLASSTYPE_VSIZE (basetype);
      BINFO_OFFSET (vbase_types) = offset;

      /* Every virtual baseclass takes at least a UNIT, so that we can
	 take its address and get something different for each base.  */
      const_size += MAX (BITS_PER_UNIT,
			 TREE_INT_CST_LOW (CLASSTYPE_SIZE (basetype)));

      vbase_types = TREE_CHAIN (vbase_types);
    }

  if (const_size)
    {
      /* Because a virtual base might take a single byte above,
	 we have to re-adjust the total size to make sure it is
	 a multiple of the alignment.  */
      /* Give the whole object the alignment it wants.  */
      const_size = CEIL (const_size, record_align) * record_align;
    }

  /* Set the alignment in the complete type.  We don't set CLASSTYPE_ALIGN
     here, as that is for this class, without any virtual base classes.  */
  TYPE_ALIGN (rec) = record_align;
  if (const_size != nonvirtual_const_size)
    {
      TYPE_SIZE (rec) = size_int (const_size);
      TYPE_SIZE_UNIT (rec) = size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (rec),
					 size_int (BITS_PER_UNIT));
    }

  /* Now propagate offset information throughout the lattice.
     Simultaneously, remove the temporary FIELD_DECLs we created in
     build_base_fields to refer to base types.  */
  field = &TYPE_FIELDS (rec);
  if (TYPE_VFIELD (rec) == *field)
    {
      /* If this class did not have a primary base, we create a
	 virtual function table pointer.  It will be the first thing
	 in the class, under the new ABI.  Skip it; the base fields
	 will follow it.  */
      my_friendly_assert (flag_new_abi
			  && !CLASSTYPE_HAS_PRIMARY_BASE_P (rec),
			  19991218);
      field = &TREE_CHAIN (*field);
    }

  for (i = 0; i < n_baseclasses; i++)
    {
      register tree base_binfo = TREE_VEC_ELT (binfos, i);
      register tree basetype = BINFO_TYPE (base_binfo);

      /* Virtual bases were laid out above, not via base fields.  */
      if (TREE_VIA_VIRTUAL (base_binfo))
	continue;

      my_friendly_assert (TREE_TYPE (*field) == basetype, 23897);

      if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
	cp_warning ("direct base `%T' inaccessible in `%T' due to ambiguity",
		    basetype, rec);

      /* The base's offset, in bytes, is where its placeholder field
	 was laid out.  */
      BINFO_OFFSET (base_binfo)
	= size_int (CEIL (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (*field)),
			  BITS_PER_UNIT));
      propagate_binfo_offsets (base_binfo, BINFO_OFFSET (base_binfo));

      /* Remove this field.  */
      *field = TREE_CHAIN (*field);
    }

  /* Finally, fix up the virtual bases: hook them into the lattice,
     unshare their binfo structure, and propagate their offsets.  */
  for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
       vbase_types = TREE_CHAIN (vbase_types))
    {
      BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
      unshare_base_binfos (vbase_types);
      propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));

      if (extra_warnings)
	{
	  tree basetype = BINFO_TYPE (vbase_types);
	  if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
	    cp_warning ("virtual base `%T' inaccessible in `%T' due to ambiguity",
			basetype, rec);
	}
    }

  return max;
}
932
933 \f
934 /* Hashing of lists so that we don't make duplicates.
935 The entry point is `list_hash_canon'. */
936
937 /* Each hash table slot is a bucket containing a chain
938 of these structures. */
939
/* One entry per hashed list; entries in the same bucket are chained
   through NEXT.  */
struct list_hash
{
  struct list_hash *next;	/* Next structure in the bucket.  */
  int hashcode;			/* Hash code of this list.  */
  tree list;			/* The list recorded here.  */
};

/* Now here is the hash table.  When recording a list, it is added
   to the slot whose index is the hash code mod the table size.
   Note that the hash table is used for several kinds of lists.
   While all these live in the same table, they are completely independent,
   and the hash code is computed differently for each of these.  */

#define TYPE_HASH_SIZE 59
static struct list_hash *list_hash_table[TYPE_HASH_SIZE];
955
956 /* Compute a hash code for a list (chain of TREE_LIST nodes
957 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
958 TREE_COMMON slots), by adding the hash codes of the individual entries. */
959
960 static int
961 list_hash (purpose, value, chain)
962 tree purpose, value, chain;
963 {
964 register int hashcode = 0;
965
966 if (chain)
967 hashcode += TYPE_HASH (chain);
968
969 if (value)
970 hashcode += TYPE_HASH (value);
971 else
972 hashcode += 1007;
973 if (purpose)
974 hashcode += TYPE_HASH (purpose);
975 else
976 hashcode += 1009;
977 return hashcode;
978 }
979
980 /* Look in the type hash table for a type isomorphic to TYPE.
981 If one is found, return it. Otherwise return 0. */
982
983 static tree
984 list_hash_lookup (hashcode, purpose, value, chain)
985 int hashcode;
986 tree purpose, value, chain;
987 {
988 register struct list_hash *h;
989
990 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
991 if (h->hashcode == hashcode
992 && TREE_PURPOSE (h->list) == purpose
993 && TREE_VALUE (h->list) == value
994 && TREE_CHAIN (h->list) == chain)
995 return h->list;
996 return 0;
997 }
998
999 /* Add an entry to the list-hash-table
1000 for a list TYPE whose hash code is HASHCODE. */
1001
1002 static void
1003 list_hash_add (hashcode, list)
1004 int hashcode;
1005 tree list;
1006 {
1007 register struct list_hash *h;
1008
1009 h = (struct list_hash *) obstack_alloc (&permanent_obstack, sizeof (struct list_hash));
1010 h->hashcode = hashcode;
1011 h->list = list;
1012 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
1013 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
1014 }
1015
1016 /* Given list components PURPOSE, VALUE, AND CHAIN, return the canonical
1017 object for an identical list if one already exists. Otherwise, build a
1018 new one, and record it as the canonical object. */
1019
1020 /* Set to 1 to debug without canonicalization. Never set by program. */
1021
1022 static int debug_no_list_hash = 0;
1023
tree
hash_tree_cons (purpose, value, chain)
     tree purpose, value, chain;
{
  tree t;
  int hashcode = 0;

  /* First see whether an identical list has already been built;
     if so, return the canonical node.  */
  if (! debug_no_list_hash)
    {
      hashcode = list_hash (purpose, value, chain);
      t = list_hash_lookup (hashcode, purpose, value, chain);
      if (t)
	return t;
    }

  t = tree_cons (purpose, value, chain);

  /* If this is a new list, record it for later reuse.  */
  if (! debug_no_list_hash)
    list_hash_add (hashcode, t);

  return t;
}
1047
1048 /* Constructor for hashed lists. */
1049
1050 tree
1051 hash_tree_chain (value, chain)
1052 tree value, chain;
1053 {
1054 return hash_tree_cons (NULL_TREE, value, chain);
1055 }
1056
1057 /* Similar, but used for concatenating two lists. */
1058
1059 tree
1060 hash_chainon (list1, list2)
1061 tree list1, list2;
1062 {
1063 if (list2 == 0)
1064 return list1;
1065 if (list1 == 0)
1066 return list2;
1067 if (TREE_CHAIN (list1) == NULL_TREE)
1068 return hash_tree_chain (TREE_VALUE (list1), list2);
1069 return hash_tree_chain (TREE_VALUE (list1),
1070 hash_chainon (TREE_CHAIN (list1), list2));
1071 }
1072 \f
1073 /* Build an association between TYPE and some parameters:
1074
1075 OFFSET is the offset added to `this' to convert it to a pointer
1076 of type `TYPE *'
1077
1078 BINFO is the base binfo to use, if we are deriving from one. This
1079 is necessary, as we want specialized parent binfos from base
1080 classes, so that the VTABLE_NAMEs of bases are for the most derived
1081 type, instead of the simple type.
1082
1083 VTABLE is the virtual function table with which to initialize
1084 sub-objects of type TYPE.
1085
1086 VIRTUALS are the virtual functions sitting in VTABLE. */
1087
tree
make_binfo (offset, binfo, vtable, virtuals)
     tree offset, binfo;
     tree vtable, virtuals;
{
  tree new_binfo = make_tree_vec (7);
  tree type;

  /* BINFO may be either an existing binfo (a TREE_VEC) or a type; in
     the latter case, use the type's own binfo (if any) as the base.  */
  if (TREE_CODE (binfo) == TREE_VEC)
    type = BINFO_TYPE (binfo);
  else
    {
      type = binfo;
      binfo = CLASS_TYPE_P (type) ? TYPE_BINFO (binfo) : NULL_TREE;
    }

  TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
  BINFO_OFFSET (new_binfo) = offset;
  BINFO_VTABLE (new_binfo) = vtable;
  BINFO_VIRTUALS (new_binfo) = virtuals;
  BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;

  /* Take the base binfo's BINFO_BASETYPES vector, but as a fresh copy
     so the new binfo can be modified independently.  */
  if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
    BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
  return new_binfo;
}
1114
1115 /* Return the binfo value for ELEM in TYPE. */
1116
tree
binfo_value (elem, type)
     tree elem;
     tree type;
{
  /* A base distance of -2 means ELEM is an ambiguous base of TYPE.  */
  if (get_base_distance (elem, type, 0, (tree *)0) == -2)
    compiler_error ("base class `%s' ambiguous in binfo_value",
		    TYPE_NAME_STRING (elem));
  if (elem == type)
    return TYPE_BINFO (type);
  /* TYPE may itself already be the binfo for ELEM.  */
  if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
    return type;
  return get_binfo (elem, type, 0);
}
1131
1132 /* Return a reversed copy of the BINFO-chain given by PATH. (If the
1133 BINFO_INHERITANCE_CHAIN points from base classes to derived
1134 classes, it will instead point from derived classes to base
1135 classes.) Returns the first node in the reversed chain. */
1136
1137 tree
1138 reverse_path (path)
1139 tree path;
1140 {
1141 register tree prev = NULL_TREE, cur;
1142 for (cur = path; cur; cur = BINFO_INHERITANCE_CHAIN (cur))
1143 {
1144 tree r = copy_node (cur);
1145 BINFO_INHERITANCE_CHAIN (r) = prev;
1146 prev = r;
1147 }
1148 return prev;
1149 }
1150
/* Dump the vtable-related contents of binfo ELEM to stderr, for use
   from the debugger.  */

void
debug_binfo (elem)
     tree elem;
{
  unsigned HOST_WIDE_INT n;
  tree virtuals;

  fprintf (stderr, "type \"%s\"; offset = %ld\n",
	   TYPE_NAME_STRING (BINFO_TYPE (elem)),
	   (long) TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
  fprintf (stderr, "vtable type:\n");
  debug_tree (BINFO_TYPE (elem));
  if (BINFO_VTABLE (elem))
    fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
  else
    fprintf (stderr, "no vtable decl yet\n");
  fprintf (stderr, "virtuals:\n");
  /* Skip the RTTI-related entries; N receives the starting index.  */
  virtuals = skip_rtti_stuff (elem, BINFO_TYPE (elem), &n);

  /* Print each virtual function with its position in the chain and
     its recorded DECL_VINDEX, so discrepancies are visible.  */
  while (virtuals)
    {
      tree fndecl = TREE_VALUE (virtuals);
      fprintf (stderr, "%s [%ld =? %ld]\n",
	       IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
	       (long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
      ++n;
      virtuals = TREE_CHAIN (virtuals);
    }
}
1180
/* Return the number of functions represented by T, which is either a
   single FUNCTION_DECL or an OVERLOAD chain.  Aborts otherwise.  */

int
count_functions (t)
     tree t;
{
  int i;
  if (TREE_CODE (t) == FUNCTION_DECL)
    return 1;
  else if (TREE_CODE (t) == OVERLOAD)
    {
      /* Count the length of the OVERLOAD chain.  */
      for (i=0; t; t = OVL_CHAIN (t))
	i++;
      return i;
    }

  my_friendly_abort (359);
  /* NOTREACHED */
  return 0;
}
1198
/* Returns nonzero if X denotes one or more functions: a
   FUNCTION_DECL, a TEMPLATE_ID_EXPR, a function template, or an
   OVERLOAD.  */

int
is_overloaded_fn (x)
     tree x;
{
  /* A baselink is also considered an overloaded function.  */
  if (TREE_CODE (x) == OFFSET_REF)
    x = TREE_OPERAND (x, 1);
  if (BASELINK_P (x))
    x = TREE_VALUE (x);
  return (TREE_CODE (x) == FUNCTION_DECL
	  || TREE_CODE (x) == TEMPLATE_ID_EXPR
	  || DECL_FUNCTION_TEMPLATE_P (x)
	  || TREE_CODE (x) == OVERLOAD);
}
1213
/* Returns nonzero if X denotes a genuine overload set: an OVERLOAD
   that either contains more than one function or wraps a function
   template.  */

int
really_overloaded_fn (x)
     tree x;
{
  /* A baselink is also considered an overloaded function.  */
  if (TREE_CODE (x) == OFFSET_REF)
    x = TREE_OPERAND (x, 1);
  if (BASELINK_P (x))
    x = TREE_VALUE (x);
  return (TREE_CODE (x) == OVERLOAD
	  && (TREE_CHAIN (x) != NULL_TREE
	      || DECL_FUNCTION_TEMPLATE_P (OVL_FUNCTION (x))));
}
1227
/* Return the first function in the overloaded function set FROM.  */

tree
get_first_fn (from)
     tree from;
{
  my_friendly_assert (is_overloaded_fn (from), 9);
  /* A baselink is also considered an overloaded function.  */
  if (BASELINK_P (from))
    from = TREE_VALUE (from);
  return OVL_CURRENT (from);
}
1238
1239 /* Returns nonzero if T is a ->* or .* expression that refers to a
1240 member function. */
1241
1242 int
1243 bound_pmf_p (t)
1244 tree t;
1245 {
1246 return (TREE_CODE (t) == OFFSET_REF
1247 && TYPE_PTRMEMFUNC_P (TREE_TYPE (TREE_OPERAND (t, 1))));
1248 }
1249
1250 /* Return a new OVL node, concatenating it with the old one. */
1251
1252 tree
1253 ovl_cons (decl, chain)
1254 tree decl;
1255 tree chain;
1256 {
1257 tree result = make_node (OVERLOAD);
1258 TREE_TYPE (result) = unknown_type_node;
1259 OVL_FUNCTION (result) = decl;
1260 TREE_CHAIN (result) = chain;
1261
1262 return result;
1263 }
1264
1265 /* Build a new overloaded function. If this is the first one,
1266 just return it; otherwise, ovl_cons the _DECLs */
1267
tree
build_overload (decl, chain)
     tree decl;
     tree chain;
{
  /* A single non-template function needs no OVERLOAD wrapper; note
     that a lone TEMPLATE_DECL does get wrapped.  */
  if (! chain && TREE_CODE (decl) != TEMPLATE_DECL)
    return decl;
  /* If CHAIN is a bare _DECL, wrap it first so the result is a
     proper OVERLOAD chain.  */
  if (chain && TREE_CODE (chain) != OVERLOAD)
    chain = ovl_cons (chain, NULL_TREE);
  return ovl_cons (decl, chain);
}
1279
1280 /* True if fn is in ovl. */
1281
1282 int
1283 ovl_member (fn, ovl)
1284 tree fn;
1285 tree ovl;
1286 {
1287 if (ovl == NULL_TREE)
1288 return 0;
1289 if (TREE_CODE (ovl) != OVERLOAD)
1290 return ovl == fn;
1291 for (; ovl; ovl = OVL_CHAIN (ovl))
1292 if (OVL_FUNCTION (ovl) == fn)
1293 return 1;
1294 return 0;
1295 }
1296
1297 int
1298 is_aggr_type_2 (t1, t2)
1299 tree t1, t2;
1300 {
1301 if (TREE_CODE (t1) != TREE_CODE (t2))
1302 return 0;
1303 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1304 }
1305
1306 /* Returns non-zero if CODE is the code for a statement. */
1307
static int
statement_code_p (code)
     enum tree_code code;
{
  switch (code)
    {
    case EXPR_STMT:
    case COMPOUND_STMT:
    case DECL_STMT:
    case IF_STMT:
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case RETURN_STMT:
    case BREAK_STMT:
    case CONTINUE_STMT:
    case SWITCH_STMT:
    case GOTO_STMT:
    case LABEL_STMT:
    case ASM_STMT:
    case SUBOBJECT:
    case CLEANUP_STMT:
    case START_CATCH_STMT:
    case CTOR_STMT:
    case SCOPE_STMT:
    case CTOR_INITIALIZER:
    case CASE_LABEL:
    case RETURN_INIT:
    case TRY_BLOCK:
    case HANDLER:
      return 1;

    default:
      /* Anything else is not a statement code.  */
      return 0;
    }
}
1344 \f
1345 #define PRINT_RING_SIZE 4
1346
const char *
lang_printable_name (decl, v)
     tree decl;
     int v;
{
  /* A small ring cache of recently printed decl names; slot I caches
     the printable name of decl_ring[I] in print_ring[I].  */
  static tree decl_ring[PRINT_RING_SIZE];
  static char *print_ring[PRINT_RING_SIZE];
  static int ring_counter;
  int i;

  /* Only cache functions.  */
  if (v < 2
      || TREE_CODE (decl) != FUNCTION_DECL
      || DECL_LANG_SPECIFIC (decl) == 0)
    return lang_decl_name (decl, v);

  /* See if this print name is lying around.  */
  for (i = 0; i < PRINT_RING_SIZE; i++)
    if (decl_ring[i] == decl)
      /* yes, so return it.  */
      return print_ring[i];

  if (++ring_counter == PRINT_RING_SIZE)
    ring_counter = 0;

  /* Never evict the slot that caches the current function's name.  */
  if (current_function_decl != NULL_TREE)
    {
      if (decl_ring[ring_counter] == current_function_decl)
	ring_counter += 1;
      if (ring_counter == PRINT_RING_SIZE)
	ring_counter = 0;
      if (decl_ring[ring_counter] == current_function_decl)
	my_friendly_abort (106);
    }

  /* Free the name previously cached in this slot before replacing it.  */
  if (print_ring[ring_counter])
    free (print_ring[ring_counter]);

  print_ring[ring_counter] = xstrdup (lang_decl_name (decl, v));
  decl_ring[ring_counter] = decl;
  return print_ring[ring_counter];
}
1389 \f
1390 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
1391 listed in RAISES. */
1392
tree
build_exception_variant (type, raises)
     tree type;
     tree raises;
{
  tree v = TYPE_MAIN_VARIANT (type);
  int type_quals = TYPE_QUALS (type);

  /* Search the existing variants of TYPE for one with the same
     qualifiers and an equivalent exception specification.  */
  for (; v; v = TYPE_NEXT_VARIANT (v))
    if (TYPE_QUALS (v) == type_quals
        && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (v), 1))
      return v;

  /* Need to build a new variant.  */
  v = build_type_copy (type);
  TYPE_RAISES_EXCEPTIONS (v) = raises;
  return v;
}
1411
1412 /* Given a TEMPLATE_TEMPLATE_PARM node T, create a new one together with its
1413 lang_specific field and its corresponding TEMPLATE_DECL node */
1414
tree
copy_template_template_parm (t)
     tree t;
{
  tree template = TYPE_NAME (t);
  tree t2;

  /* Make a new type node, copy the associated TEMPLATE_DECL (and its
     lang-specific data), and link the two together.  */
  t2 = make_aggr_type (TEMPLATE_TEMPLATE_PARM);
  template = copy_node (template);
  copy_lang_decl (template);

  TREE_TYPE (template) = t2;
  TYPE_NAME (t2) = template;
  TYPE_STUB_DECL (t2) = template;

  /* No need to copy these */
  TYPE_FIELDS (t2) = TYPE_FIELDS (t);
  TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t2)
    = TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t);
  return t2;
}
1436
1437 /* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
1438 FUNC is called with the DATA and the address of each sub-tree. If
1439 FUNC returns a non-NULL value, the traversal is aborted, and the
1440 value returned by FUNC is returned. */
1441
tree
walk_tree (tp, func, data)
     tree *tp;
     walk_tree_fn func;
     void *data;
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

/* Recurse into NODE; propagate any non-null result immediately.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree (&(NODE), func, data);		\
      if (result)					\
	return result;					\
    }							\
  while (0)

  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    return NULL_TREE;

  code = TREE_CODE (*tp);

  /* Handle common cases up front.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || TREE_CODE_CLASS (code) == 'r'
      || TREE_CODE_CLASS (code) == 's')
    {
      int i, len;

      /* Walk over all the sub-trees of this operand.  */
      len = first_rtl_op (code);
      /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	 But, we only want to walk once.  */
      if (code == TARGET_EXPR
	  && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
	--len;
      /* Go through the subtrees.  We need to do this in forward order so
	 that the scope of a FOR_EXPR is handled properly.  */
      for (i = 0; i < len; ++i)
	WALK_SUBTREE (TREE_OPERAND (*tp, i));

      /* For statements, we also walk the chain so that we cover the
	 entire statement tree.  */
      if (statement_code_p (code))
	{
	  if (code == DECL_STMT
	      && DECL_STMT_DECL (*tp)
	      && TREE_CODE_CLASS (TREE_CODE (DECL_STMT_DECL (*tp))) == 'd')
	    {
	      /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
		 into declarations that are just mentioned, rather than
		 declared; they don't really belong to this part of the tree.
		 And, we can see cycles: the initializer for a declaration can
		 refer to the declaration itself.  */
	      WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
	      WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
	    }

	  WALK_SUBTREE (TREE_CHAIN (*tp));
	}

      /* We didn't find what we were looking for.  */
      return NULL_TREE;
    }
  else if (TREE_CODE_CLASS (code) == 'd')
    {
      /* For declarations, the only subtree walked is the type.  */
      WALK_SUBTREE (TREE_TYPE (*tp));

      /* We didn't find what we were looking for.  */
      return NULL_TREE;
    }

  /* Not one of the easy cases.  We must explicitly go through the
     children.  */
  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case STRING_CST:
    case DEFAULT_ARG:
    case TEMPLATE_TEMPLATE_PARM:
    case TEMPLATE_PARM_INDEX:
    case TEMPLATE_TYPE_PARM:
    case REAL_TYPE:
    case COMPLEX_TYPE:
    case VOID_TYPE:
    case BOOLEAN_TYPE:
    case TYPENAME_TYPE:
    case UNION_TYPE:
    case ENUMERAL_TYPE:
    case TYPEOF_TYPE:
    case BLOCK:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case PTRMEM_CST:
      WALK_SUBTREE (TREE_TYPE (*tp));
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_PURPOSE (*tp));
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE (TREE_CHAIN (*tp));
      break;

    case OVERLOAD:
      WALK_SUBTREE (OVL_FUNCTION (*tp));
      WALK_SUBTREE (OVL_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);
	while (len--)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
      }
      break;

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE (TREE_IMAGPART (*tp));
      break;

    case CONSTRUCTOR:
      WALK_SUBTREE (CONSTRUCTOR_ELTS (*tp));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (TYPE_ARG_TYPES (*tp));
      break;

    case ARRAY_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (TYPE_DOMAIN (*tp));
      break;

    case INTEGER_TYPE:
      WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
      WALK_SUBTREE (TYPE_MAX_VALUE (*tp));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (*tp));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (*tp));
      break;

    case RECORD_TYPE:
      if (TYPE_PTRMEMFUNC_P (*tp))
	WALK_SUBTREE (TYPE_PTRMEMFUNC_FN_TYPE (*tp));
      break;

    default:
      /* A tree code we do not know how to walk.  */
      my_friendly_abort (19990803);
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE
}
1631
1632 /* Passed to walk_tree. Checks for the use of types with no linkage. */
1633
static tree
no_linkage_helper (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  tree t = *tp;

  /* A class or enumeration type has no linkage if it is local to a
     function or is anonymous.  */
  if (TYPE_P (t)
      && (IS_AGGR_TYPE (t) || TREE_CODE (t) == ENUMERAL_TYPE)
      && (decl_function_context (TYPE_MAIN_DECL (t))
	  || ANON_AGGRNAME_P (TYPE_IDENTIFIER (t))))
    return t;
  return NULL_TREE;
}
1649
1650 /* Check if the type T depends on a type with no linkage and if so, return
1651 it. */
1652
tree
no_linkage_check (t)
     tree t;
{
  /* There's no point in checking linkage on template functions; we
     can't know their complete types.  */
  if (processing_template_decl)
    return NULL_TREE;

  t = walk_tree (&t, no_linkage_helper, NULL);
  /* NOTE(review): the walk apparently can hand back error_mark_node;
     treat that as "no no-linkage type found".  */
  if (t != error_mark_node)
    return t;
  return NULL_TREE;
}
1667
1668 /* Passed to walk_tree. Copies the node pointed to, if appropriate. */
1669
tree
copy_tree_r (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (*tp);

  /* We make copies of most nodes.  */
  if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
      || TREE_CODE_CLASS (code) == 'r'
      || TREE_CODE_CLASS (code) == 'c'
      || TREE_CODE_CLASS (code) == 's'
      || code == PARM_DECL
      || code == TREE_LIST
      || code == TREE_VEC
      || code == OVERLOAD)
    {
      /* Because the chain gets clobbered when we make a copy, we save it
	 here.  */
      tree chain = TREE_CHAIN (*tp);

      /* Copy the node.  */
      *tp = copy_node (*tp);

      /* Now, restore the chain, if appropriate.  That will cause
	 walk_tree to walk into the chain as well.  */
      if (code == PARM_DECL || code == TREE_LIST || code == OVERLOAD
	  || statement_code_p (code))
	TREE_CHAIN (*tp) = chain;

      /* For now, we don't update BLOCKs when we make copies.  So, we
	 have to nullify all scope-statements.  */
      if (TREE_CODE (*tp) == SCOPE_STMT)
	SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
    }
  else if (code == TEMPLATE_TEMPLATE_PARM)
    /* These must be copied specially.  */
    *tp = copy_template_template_parm (*tp);
  else if (TREE_CODE_CLASS (code) == 't')
    /* There's no need to copy types, or anything beneath them.  */
    *walk_subtrees = 0;

  /* Any node not handled above (e.g. a _DECL other than PARM_DECL)
     is left shared rather than copied.  */
  return NULL_TREE;
}
1715
1716 #ifdef GATHER_STATISTICS
1717 extern int depth_reached;
1718 #endif
1719
/* Dump statistics gathered by the C++ front end.  */

void
print_lang_statistics ()
{
  print_search_statistics ();
  print_class_statistics ();
#ifdef GATHER_STATISTICS
  fprintf (stderr, "maximum template instantiation depth reached: %d\n",
	   depth_reached);
#endif
}
1730
1731 /* This is used by the `assert' macro. It is provided in libgcc.a,
1732 which `cc' doesn't know how to link. Note that the C++ front-end
1733 no longer actually uses the `assert' macro (instead, it calls
1734 my_friendly_assert). But all of the back-end files still need this. */
1735
void
__eprintf (string, expression, line, filename)
     const char *string;
     const char *expression;
     unsigned line;
     const char *filename;
{
  fprintf (stderr, string, expression, line, filename);
  /* Flush so the message is visible before we abort.  */
  fflush (stderr);
  abort ();
}
1747
1748 /* Return, as an INTEGER_CST node, the number of elements for TYPE
1749 (which is an ARRAY_TYPE). This counts only elements of the top
1750 array. */
1751
1752 tree
1753 array_type_nelts_top (type)
1754 tree type;
1755 {
1756 return fold (build (PLUS_EXPR, sizetype,
1757 array_type_nelts (type),
1758 integer_one_node));
1759 }
1760
1761 /* Return, as an INTEGER_CST node, the number of elements for TYPE
1762 (which is an ARRAY_TYPE). This one is a recursive count of all
1763 ARRAY_TYPEs that are clumped together. */
1764
1765 tree
1766 array_type_nelts_total (type)
1767 tree type;
1768 {
1769 tree sz = array_type_nelts_top (type);
1770 type = TREE_TYPE (type);
1771 while (TREE_CODE (type) == ARRAY_TYPE)
1772 {
1773 tree n = array_type_nelts_top (type);
1774 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1775 type = TREE_TYPE (type);
1776 }
1777 return sz;
1778 }
1779
1780 /* Called from break_out_target_exprs via mapcar. */
1781
static tree
bot_manip (tp, walk_subtrees, data)
     tree *tp;
     int *walk_subtrees;
     void *data;
{
  /* DATA maps each original TARGET_EXPR temporary to its replacement.  */
  splay_tree target_remap = ((splay_tree) data);
  tree t = *tp;

  if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
    {
      /* There can't be any TARGET_EXPRs below this point.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  else if (TREE_CODE (t) == TARGET_EXPR)
    {
      tree u;

      if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR)
	{
	  /* The initializer is an AGGR_INIT_EXPR: mark the function it
	     calls as used, and rebuild a TARGET_EXPR (with a fresh
	     temporary) around a copy of the initializer.  */
	  mark_used (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 1), 0), 0));
	  u = build_cplus_new
	    (TREE_TYPE (t), break_out_target_exprs (TREE_OPERAND (t, 1)));
	}
      else
	{
	  /* Otherwise copy the TARGET_EXPR and give it a brand-new
	     temporary VAR_DECL.  */
	  u = copy_node (t);
	  TREE_OPERAND (u, 0) = build (VAR_DECL, TREE_TYPE (t));
	  layout_decl (TREE_OPERAND (u, 0), 0);
	}

      /* Map the old variable to the new one.  */
      splay_tree_insert (target_remap,
			 (splay_tree_key) TREE_OPERAND (t, 0),
			 (splay_tree_value) TREE_OPERAND (u, 0));

      /* Replace the old expression with the new version.  */
      *tp = u;
      /* We don't have to go below this point; the recursive call to
	 break_out_target_exprs will have handled anything below this
	 point.  */
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  else if (TREE_CODE (t) == CALL_EXPR)
    /* Mark the called function as used.  */
    mark_used (TREE_OPERAND (TREE_OPERAND (t, 0), 0));

  /* Make a copy of this node.  */
  return copy_tree_r (tp, walk_subtrees, NULL);
}
1833
1834 /* Replace all remapped VAR_DECLs in T with their new equivalents.
1835 DATA is really a splay-tree mapping old variables to new
1836 variables. */
1837
static tree
bot_replace (t, walk_subtrees, data)
     tree *t;
     int *walk_subtrees ATTRIBUTE_UNUSED;
     void *data;
{
  splay_tree target_remap = ((splay_tree) data);

  /* If this VAR_DECL was remapped by bot_manip, substitute its
     replacement.  */
  if (TREE_CODE (*t) == VAR_DECL)
    {
      splay_tree_node n = splay_tree_lookup (target_remap,
					     (splay_tree_key) *t);
      if (n)
	*t = (tree) n->value;
    }

  return NULL_TREE;
}
1856
1857 /* When we parse a default argument expression, we may create
1858 temporary variables via TARGET_EXPRs. When we actually use the
1859 default-argument expression, we make a copy of the expression, but
1860 we must replace the temporaries with appropriate local versions. */
1861
tree
break_out_target_exprs (t)
     tree t;
{
  /* bot_manip recurses back into this function, so the remap table
     and its reference count are static; only the outermost call
     creates and destroys the table.  */
  static int target_remap_count;
  static splay_tree target_remap;

  if (!target_remap_count++)
    target_remap = splay_tree_new (splay_tree_compare_pointers,
				   /*splay_tree_delete_key_fn=*/NULL,
				   /*splay_tree_delete_value_fn=*/NULL);
  /* First pass: copy the tree, creating new temporaries and recording
     the old-to-new mapping.  */
  walk_tree (&t, bot_manip, target_remap);
  /* Second pass: rewrite references to the old temporaries.  */
  walk_tree (&t, bot_replace, target_remap);

  if (!--target_remap_count)
    {
      splay_tree_delete (target_remap);
      target_remap = NULL;
    }

  return t;
}
1884
1885 /* Obstack used for allocating nodes in template function and variable
1886 definitions. */
1887
1888 /* Similar to `build_nt', except that we set TREE_COMPLEXITY to be the
1889 current line number. */
1890
tree
build_min_nt VPROTO((enum tree_code code, ...))
{
#ifndef ANSI_PROTOTYPES
  enum tree_code code;
#endif
  va_list p;
  register tree t;
  register int length;
  register int i;

  VA_START (p, code);

#ifndef ANSI_PROTOTYPES
  code = va_arg (p, enum tree_code);
#endif

  t = make_node (code);
  /* The number of operands is fixed by the tree code.  */
  length = tree_code_length[(int) code];
  /* Record the current line number in TREE_COMPLEXITY.  */
  TREE_COMPLEXITY (t) = lineno;

  /* The remaining varargs are the operands, in order.  */
  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
    }

  va_end (p);
  return t;
}
1921
1922 /* Similar to `build', except we set TREE_COMPLEXITY to the current
1923 line-number. */
1924
tree
build_min VPROTO((enum tree_code code, tree tt, ...))
{
#ifndef ANSI_PROTOTYPES
  enum tree_code code;
  tree tt;
#endif
  va_list p;
  register tree t;
  register int length;
  register int i;

  VA_START (p, tt);

#ifndef ANSI_PROTOTYPES
  code = va_arg (p, enum tree_code);
  tt = va_arg (p, tree);
#endif

  t = make_node (code);
  /* The number of operands is fixed by the tree code.  */
  length = tree_code_length[(int) code];
  TREE_TYPE (t) = tt;
  /* Record the current line number in TREE_COMPLEXITY.  */
  TREE_COMPLEXITY (t) = lineno;

  /* The remaining varargs are the operands, in order.  */
  for (i = 0; i < length; i++)
    {
      tree x = va_arg (p, tree);
      TREE_OPERAND (t, i) = x;
    }

  va_end (p);
  return t;
}
1958
1959 tree
1960 get_type_decl (t)
1961 tree t;
1962 {
1963 if (TREE_CODE (t) == TYPE_DECL)
1964 return t;
1965 if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
1966 return TYPE_STUB_DECL (t);
1967
1968 my_friendly_abort (42);
1969
1970 /* Stop compiler from complaining control reaches end of non-void function. */
1971 return 0;
1972 }
1973
/* Return nonzero if node T can be freed, i.e. it is the most recent
   allocation on OBSTACK.  Only TREE_VEC nodes are supported.  */

int
can_free (obstack, t)
     struct obstack *obstack;
     tree t;
{
  int size = 0;

  /* Compute the number of bytes the node occupies.  */
  if (TREE_CODE (t) == TREE_VEC)
    size = (TREE_VEC_LENGTH (t)-1) * sizeof (tree) + sizeof (struct tree_vec);
  else
    my_friendly_abort (42);

  /* T is freeable only if its rounded-up end coincides with the
     obstack's next free byte.  */
#define ROUND(x) ((x + obstack_alignment_mask (obstack)) \
		  & ~ obstack_alignment_mask (obstack))
  if ((char *)t + ROUND (size) == obstack_next_free (obstack))
    return 1;
#undef ROUND

  return 0;
}
1994
1995 /* Return first vector element whose BINFO_TYPE is ELEM.
1996 Return 0 if ELEM is not in VEC. VEC may be NULL_TREE. */
1997
1998 tree
1999 vec_binfo_member (elem, vec)
2000 tree elem, vec;
2001 {
2002 int i;
2003
2004 if (vec)
2005 for (i = 0; i < TREE_VEC_LENGTH (vec); ++i)
2006 if (same_type_p (elem, BINFO_TYPE (TREE_VEC_ELT (vec, i))))
2007 return TREE_VEC_ELT (vec, i);
2008
2009 return NULL_TREE;
2010 }
2011
2012 /* Kludge around the fact that DECL_CONTEXT for virtual functions returns
2013 the wrong thing for decl_function_context. Hopefully the uses in the
2014 backend won't matter, since we don't need a static chain for local class
2015 methods. FIXME! */
2016
2017 tree
2018 hack_decl_function_context (decl)
2019 tree decl;
2020 {
2021 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_FUNCTION_MEMBER_P (decl))
2022 return decl_function_context (TYPE_MAIN_DECL (DECL_CLASS_CONTEXT (decl)));
2023 return decl_function_context (decl);
2024 }
2025
2026 /* Returns the namespace that contains DECL, whether directly or
2027 indirectly. */
2028
2029 tree
2030 decl_namespace_context (decl)
2031 tree decl;
2032 {
2033 while (1)
2034 {
2035 if (TREE_CODE (decl) == NAMESPACE_DECL)
2036 return decl;
2037 else if (TYPE_P (decl))
2038 decl = CP_DECL_CONTEXT (TYPE_MAIN_DECL (decl));
2039 else
2040 decl = CP_DECL_CONTEXT (decl);
2041 }
2042 }
2043
2044 /* Return truthvalue of whether T1 is the same tree structure as T2.
2045 Return 1 if they are the same.
2046 Return 0 if they are understandably different.
2047 Return -1 if either contains tree structure not understood by
2048 this function. */
2049
2050 int
2051 cp_tree_equal (t1, t2)
2052 tree t1, t2;
2053 {
2054 register enum tree_code code1, code2;
2055 int cmp;
2056
2057 if (t1 == t2)
2058 return 1;
2059 if (t1 == 0 || t2 == 0)
2060 return 0;
2061
2062 code1 = TREE_CODE (t1);
2063 code2 = TREE_CODE (t2);
2064
2065 if (code1 == NOP_EXPR || code1 == CONVERT_EXPR || code1 == NON_LVALUE_EXPR)
2066 {
2067 if (code2 == NOP_EXPR || code2 == CONVERT_EXPR || code2 == NON_LVALUE_EXPR)
2068 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2069 else
2070 return cp_tree_equal (TREE_OPERAND (t1, 0), t2);
2071 }
2072 else if (code2 == NOP_EXPR || code2 == CONVERT_EXPR
2073 || code2 == NON_LVALUE_EXPR)
2074 return cp_tree_equal (t1, TREE_OPERAND (t2, 0));
2075
2076 if (code1 != code2)
2077 return 0;
2078
2079 switch (code1)
2080 {
2081 case INTEGER_CST:
2082 return TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
2083 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2);
2084
2085 case REAL_CST:
2086 return REAL_VALUES_EQUAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
2087
2088 case STRING_CST:
2089 return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
2090 && !bcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
2091 TREE_STRING_LENGTH (t1));
2092
2093 case CONSTRUCTOR:
2094 /* We need to do this when determining whether or not two
2095 non-type pointer to member function template arguments
2096 are the same. */
2097 if (!(same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))
2098 /* The first operand is RTL. */
2099 && TREE_OPERAND (t1, 0) == TREE_OPERAND (t2, 0)))
2100 return 0;
2101 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2102
2103 case TREE_LIST:
2104 cmp = cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
2105 if (cmp <= 0)
2106 return cmp;
2107 cmp = cp_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2));
2108 if (cmp <= 0)
2109 return cmp;
2110 return cp_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2));
2111
2112 case SAVE_EXPR:
2113 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2114
2115 case CALL_EXPR:
2116 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2117 if (cmp <= 0)
2118 return cmp;
2119 return simple_cst_list_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2120
2121 case TARGET_EXPR:
2122 /* Special case: if either target is an unallocated VAR_DECL,
2123 it means that it's going to be unified with whatever the
2124 TARGET_EXPR is really supposed to initialize, so treat it
2125 as being equivalent to anything. */
2126 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
2127 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
2128 && DECL_RTL (TREE_OPERAND (t1, 0)) == 0)
2129 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
2130 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
2131 && DECL_RTL (TREE_OPERAND (t2, 0)) == 0))
2132 cmp = 1;
2133 else
2134 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2135 if (cmp <= 0)
2136 return cmp;
2137 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2138
2139 case WITH_CLEANUP_EXPR:
2140 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2141 if (cmp <= 0)
2142 return cmp;
2143 return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t1, 2));
2144
2145 case COMPONENT_REF:
2146 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
2147 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2148 return 0;
2149
2150 case VAR_DECL:
2151 case PARM_DECL:
2152 case CONST_DECL:
2153 case FUNCTION_DECL:
2154 return 0;
2155
2156 case TEMPLATE_PARM_INDEX:
2157 return TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2)
2158 && TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2);
2159
2160 case SIZEOF_EXPR:
2161 case ALIGNOF_EXPR:
2162 if (TREE_CODE (TREE_OPERAND (t1, 0)) != TREE_CODE (TREE_OPERAND (t2, 0)))
2163 return 0;
2164 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t1, 0))) == 't')
2165 return same_type_p (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2166 break;
2167
2168 case PTRMEM_CST:
2169 /* Two pointer-to-members are the same if they point to the same
2170 field or function in the same class. */
2171 return (PTRMEM_CST_MEMBER (t1) == PTRMEM_CST_MEMBER (t2)
2172 && same_type_p (PTRMEM_CST_CLASS (t1), PTRMEM_CST_CLASS (t2)));
2173
2174 default:
2175 break;
2176 }
2177
2178 switch (TREE_CODE_CLASS (code1))
2179 {
2180 int i;
2181 case '1':
2182 case '2':
2183 case '<':
2184 case 'e':
2185 case 'r':
2186 case 's':
2187 cmp = 1;
2188 for (i=0; i<tree_code_length[(int) code1]; ++i)
2189 {
2190 cmp = cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
2191 if (cmp <= 0)
2192 return cmp;
2193 }
2194 return cmp;
2195 }
2196
2197 return -1;
2198 }
2199
2200 /* Build a wrapper around some pointer PTR so we can use it as a tree. */
2201
2202 tree
2203 build_ptr_wrapper (ptr)
2204 void *ptr;
2205 {
2206 tree t = make_node (WRAPPER);
2207 WRAPPER_PTR (t) = ptr;
2208 return t;
2209 }
2210
2211 /* Same, but on the expression_obstack. */
2212
2213 tree
2214 build_expr_ptr_wrapper (ptr)
2215 void *ptr;
2216 {
2217 return build_ptr_wrapper (ptr);
2218 }
2219
2220 /* Build a wrapper around some integer I so we can use it as a tree. */
2221
2222 tree
2223 build_int_wrapper (i)
2224 int i;
2225 {
2226 tree t = make_node (WRAPPER);
2227 WRAPPER_INT (t) = i;
2228 return t;
2229 }
2230
2231 static tree
2232 build_srcloc (file, line)
2233 char *file;
2234 int line;
2235 {
2236 tree t;
2237
2238 t = make_node (SRCLOC);
2239 SRCLOC_FILE (t) = file;
2240 SRCLOC_LINE (t) = line;
2241
2242 return t;
2243 }
2244
2245 tree
2246 build_srcloc_here ()
2247 {
2248 return build_srcloc (input_filename, lineno);
2249 }
2250
2251 /* The type of ARG when used as an lvalue. */
2252
2253 tree
2254 lvalue_type (arg)
2255 tree arg;
2256 {
2257 tree type = TREE_TYPE (arg);
2258 if (TREE_CODE (arg) == OVERLOAD)
2259 type = unknown_type_node;
2260 return type;
2261 }
2262
2263 /* The type of ARG for printing error messages; denote lvalues with
2264 reference types. */
2265
2266 tree
2267 error_type (arg)
2268 tree arg;
2269 {
2270 tree type = TREE_TYPE (arg);
2271 if (TREE_CODE (type) == ARRAY_TYPE)
2272 ;
2273 else if (real_lvalue_p (arg))
2274 type = build_reference_type (lvalue_type (arg));
2275 else if (IS_AGGR_TYPE (type))
2276 type = lvalue_type (arg);
2277
2278 return type;
2279 }
2280
2281 /* Does FUNCTION use a variable-length argument list? */
2282
2283 int
2284 varargs_function_p (function)
2285 tree function;
2286 {
2287 tree parm = TYPE_ARG_TYPES (TREE_TYPE (function));
2288 for (; parm; parm = TREE_CHAIN (parm))
2289 if (TREE_VALUE (parm) == void_type_node)
2290 return 0;
2291 return 1;
2292 }
2293
2294 /* Returns 1 if decl is a member of a class. */
2295
2296 int
2297 member_p (decl)
2298 tree decl;
2299 {
2300 tree ctx = DECL_CONTEXT (decl);
2301 return (ctx && TREE_CODE_CLASS (TREE_CODE (ctx)) == 't');
2302 }
2303
2304 /* Create a placeholder for member access where we don't actually have an
2305 object that the access is against. */
2306
2307 tree
2308 build_dummy_object (type)
2309 tree type;
2310 {
2311 tree decl = build1 (NOP_EXPR, build_pointer_type (type), void_zero_node);
2312 return build_indirect_ref (decl, NULL_PTR);
2313 }
2314
2315 /* We've gotten a reference to a member of TYPE. Return *this if appropriate,
2316 or a dummy object otherwise. If BINFOP is non-0, it is filled with the
2317 binfo path from current_class_type to TYPE, or 0. */
2318
2319 tree
2320 maybe_dummy_object (type, binfop)
2321 tree type;
2322 tree *binfop;
2323 {
2324 tree decl, context;
2325
2326 if (current_class_type
2327 && get_base_distance (type, current_class_type, 0, binfop) != -1)
2328 context = current_class_type;
2329 else
2330 {
2331 /* Reference from a nested class member function. */
2332 context = type;
2333 if (binfop)
2334 *binfop = TYPE_BINFO (type);
2335 }
2336
2337 if (current_class_ref && context == current_class_type)
2338 decl = current_class_ref;
2339 else
2340 decl = build_dummy_object (context);
2341
2342 return decl;
2343 }
2344
2345 /* Returns 1 if OB is a placeholder object, or a pointer to one. */
2346
2347 int
2348 is_dummy_object (ob)
2349 tree ob;
2350 {
2351 if (TREE_CODE (ob) == INDIRECT_REF)
2352 ob = TREE_OPERAND (ob, 0);
2353 return (TREE_CODE (ob) == NOP_EXPR
2354 && TREE_OPERAND (ob, 0) == void_zero_node);
2355 }
2356
2357 /* Returns 1 iff type T is a POD type, as defined in [basic.types]. */
2358
2359 int
2360 pod_type_p (t)
2361 tree t;
2362 {
2363 while (TREE_CODE (t) == ARRAY_TYPE)
2364 t = TREE_TYPE (t);
2365
2366 if (INTEGRAL_TYPE_P (t))
2367 return 1; /* integral, character or enumeral type */
2368 if (FLOAT_TYPE_P (t))
2369 return 1;
2370 if (TYPE_PTR_P (t))
2371 return 1; /* pointer to non-member */
2372 if (TYPE_PTRMEM_P (t))
2373 return 1; /* pointer to member object */
2374 if (TYPE_PTRMEMFUNC_P (t))
2375 return 1; /* pointer to member function */
2376
2377 if (! CLASS_TYPE_P (t))
2378 return 0; /* other non-class type (reference or function) */
2379 if (CLASSTYPE_NON_POD_P (t))
2380 return 0;
2381 return 1;
2382 }
2383
2384 /* Return a 1 if ATTR_NAME and ATTR_ARGS denote a valid C++-specific
2385 attribute for either declaration DECL or type TYPE and 0 otherwise.
2386 Plugged into valid_lang_attribute. */
2387
2388 int
2389 cp_valid_lang_attribute (attr_name, attr_args, decl, type)
2390 tree attr_name;
2391 tree attr_args ATTRIBUTE_UNUSED;
2392 tree decl ATTRIBUTE_UNUSED;
2393 tree type ATTRIBUTE_UNUSED;
2394 {
2395 if (is_attribute_p ("com_interface", attr_name))
2396 {
2397 if (! flag_vtable_thunks)
2398 {
2399 error ("`com_interface' only supported with -fvtable-thunks");
2400 return 0;
2401 }
2402
2403 if (attr_args != NULL_TREE
2404 || decl != NULL_TREE
2405 || ! CLASS_TYPE_P (type)
2406 || type != TYPE_MAIN_VARIANT (type))
2407 {
2408 warning ("`com_interface' attribute can only be applied to class definitions");
2409 return 0;
2410 }
2411
2412 CLASSTYPE_COM_INTERFACE (type) = 1;
2413 return 1;
2414 }
2415 else if (is_attribute_p ("init_priority", attr_name))
2416 {
2417 tree initp_expr = (attr_args ? TREE_VALUE (attr_args): NULL_TREE);
2418 int pri;
2419
2420 if (initp_expr)
2421 STRIP_NOPS (initp_expr);
2422
2423 if (!initp_expr || TREE_CODE (initp_expr) != INTEGER_CST)
2424 {
2425 error ("requested init_priority is not an integer constant");
2426 return 0;
2427 }
2428
2429 pri = TREE_INT_CST_LOW (initp_expr);
2430
2431 while (TREE_CODE (type) == ARRAY_TYPE)
2432 type = TREE_TYPE (type);
2433
2434 if (decl == NULL_TREE
2435 || TREE_CODE (decl) != VAR_DECL
2436 || ! TREE_STATIC (decl)
2437 || DECL_EXTERNAL (decl)
2438 || (TREE_CODE (type) != RECORD_TYPE
2439 && TREE_CODE (type) != UNION_TYPE)
2440 /* Static objects in functions are initialized the
2441 first time control passes through that
2442 function. This is not precise enough to pin down an
2443 init_priority value, so don't allow it. */
2444 || current_function_decl)
2445 {
2446 error ("can only use init_priority attribute on file-scope definitions of objects of class type");
2447 return 0;
2448 }
2449
2450 if (pri > MAX_INIT_PRIORITY || pri <= 0)
2451 {
2452 error ("requested init_priority is out of range");
2453 return 0;
2454 }
2455
2456 /* Check for init_priorities that are reserved for
2457 language and runtime support implementations.*/
2458 if (pri <= MAX_RESERVED_INIT_PRIORITY)
2459 {
2460 warning
2461 ("requested init_priority is reserved for internal use");
2462 }
2463
2464 DECL_INIT_PRIORITY (decl) = pri;
2465 return 1;
2466 }
2467
2468 return 0;
2469 }
2470
/* Return a new PTRMEM_CST of the indicated TYPE.  The MEMBER is the
   thing pointed to by the constant.  */

tree
make_ptrmem_cst (type, member)
     tree type;
     tree member;
{
  tree ptrmem_cst = make_node (PTRMEM_CST);
  /* It would seem a great convenience if make_node would set
     TREE_CONSTANT for things of class `c', but it does not.  */
  TREE_CONSTANT (ptrmem_cst) = 1;
  TREE_TYPE (ptrmem_cst) = type;
  PTRMEM_CST_MEMBER (ptrmem_cst) = member;
  return ptrmem_cst;
}
2487
2488 /* Mark ARG (which is really a list_hash_table **) for GC. */
2489
2490 static void
2491 mark_list_hash (arg)
2492 void *arg;
2493 {
2494 struct list_hash *lh;
2495
2496 for (lh = * ((struct list_hash **) arg); lh; lh = lh->next)
2497 ggc_mark_tree (lh->list);
2498 }
2499
2500 /* Initialize tree.c. */
2501
2502 void
2503 init_tree ()
2504 {
2505 make_lang_type_fn = cp_make_lang_type;
2506 lang_unsave = cp_unsave;
2507 ggc_add_root (list_hash_table,
2508 sizeof (list_hash_table) / sizeof (struct list_hash *),
2509 sizeof (struct list_hash *),
2510 mark_list_hash);
2511 }
2512
2513 /* The SAVE_EXPR pointed to by TP is being copied. If ST contains
2514 information indicating to what new SAVE_EXPR this one should be
2515 mapped, use that one. Otherwise, create a new node and enter it in
2516 ST. FN is the function into which the copy will be placed. */
2517
2518 void
2519 remap_save_expr (tp, st, fn, walk_subtrees)
2520 tree *tp;
2521 splay_tree st;
2522 tree fn;
2523 int *walk_subtrees;
2524 {
2525 splay_tree_node n;
2526
2527 /* See if we already encountered this SAVE_EXPR. */
2528 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2529
2530 /* If we didn't already remap this SAVE_EXPR, do so now. */
2531 if (!n)
2532 {
2533 tree t = copy_node (*tp);
2534
2535 /* The SAVE_EXPR is now part of the function into which we
2536 are inlining this body. */
2537 SAVE_EXPR_CONTEXT (t) = fn;
2538 /* And we haven't evaluated it yet. */
2539 SAVE_EXPR_RTL (t) = NULL_RTX;
2540 /* Remember this SAVE_EXPR. */
2541 n = splay_tree_insert (st,
2542 (splay_tree_key) *tp,
2543 (splay_tree_value) t);
2544 }
2545 else
2546 /* We've already walked into this SAVE_EXPR, so we needn't do it
2547 again. */
2548 *walk_subtrees = 0;
2549
2550 /* Replace this SAVE_EXPR with the copy. */
2551 *tp = (tree) n->value;
2552 }
2553
2554 /* Called via walk_tree. If *TP points to a DECL_STMT for a local
2555 declaration, copies the declaration and enters it in the splay_tree
2556 pointed to by DATA (which is really a `splay_tree *'). */
2557
2558 static tree
2559 mark_local_for_remap_r (tp, walk_subtrees, data)
2560 tree *tp;
2561 int *walk_subtrees ATTRIBUTE_UNUSED;
2562 void *data;
2563 {
2564 tree t = *tp;
2565 splay_tree st = (splay_tree) data;
2566
2567 if ((TREE_CODE (t) == DECL_STMT
2568 && nonstatic_local_decl_p (DECL_STMT_DECL (t)))
2569 || TREE_CODE (t) == LABEL_STMT)
2570 {
2571 tree decl;
2572 tree copy;
2573
2574 /* Figure out what's being declared. */
2575 decl = (TREE_CODE (t) == DECL_STMT
2576 ? DECL_STMT_DECL (t) : LABEL_STMT_LABEL (t));
2577
2578 /* Make a copy. */
2579 copy = copy_decl_for_inlining (decl,
2580 DECL_CONTEXT (decl),
2581 DECL_CONTEXT (decl));
2582
2583 /* Remember the copy. */
2584 splay_tree_insert (st,
2585 (splay_tree_key) decl,
2586 (splay_tree_value) copy);
2587 }
2588
2589 return NULL_TREE;
2590 }
2591
2592 /* Called via walk_tree when an expression is unsaved. Using the
2593 splay_tree pointed to by ST (which is really a `splay_tree *'),
2594 remaps all local declarations to appropriate replacements. */
2595
2596 static tree
2597 cp_unsave_r (tp, walk_subtrees, data)
2598 tree *tp;
2599 int *walk_subtrees;
2600 void *data;
2601 {
2602 splay_tree st = (splay_tree) data;
2603 splay_tree_node n;
2604
2605 /* Only a local declaration (variable or label). */
2606 if (nonstatic_local_decl_p (*tp))
2607 {
2608 /* Lookup the declaration. */
2609 n = splay_tree_lookup (st, (splay_tree_key) *tp);
2610
2611 /* If it's there, remap it. */
2612 if (n)
2613 *tp = (tree) n->value;
2614 }
2615 else if (TREE_CODE (*tp) == SAVE_EXPR)
2616 remap_save_expr (tp, st, current_function_decl, walk_subtrees);
2617 else
2618 {
2619 copy_tree_r (tp, walk_subtrees, NULL);
2620
2621 /* Do whatever unsaving is required. */
2622 unsave_expr_1 (*tp);
2623 }
2624
2625 /* Keep iterating. */
2626 return NULL_TREE;
2627 }
2628
2629 /* Called by unsave_expr_now whenever an expression (*TP) needs to be
2630 unsaved. */
2631
2632 static void
2633 cp_unsave (tp)
2634 tree *tp;
2635 {
2636 splay_tree st;
2637
2638 /* Create a splay-tree to map old local variable declarations to new
2639 ones. */
2640 st = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
2641
2642 /* Walk the tree once figuring out what needs to be remapped. */
2643 walk_tree (tp, mark_local_for_remap_r, st);
2644
2645 /* Walk the tree again, copying, remapping, and unsaving. */
2646 walk_tree (tp, cp_unsave_r, st);
2647
2648 /* Clean up. */
2649 splay_tree_delete (st);
2650 }