1 /* Functions related to invoking methods and overloaded functions.
2 Copyright (C) 1987, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com) and
4 hacked by Brendan Kehoe (brendan@cygnus.com).
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23
24 /* High-level class interface. */
25
26 #include "config.h"
27 #include "tree.h"
28 #include <stdio.h>
29 #include "cp-tree.h"
30 #include "class.h"
31 #include "output.h"
32 #include "flags.h"
33
34 #include "obstack.h"
35 #define obstack_chunk_alloc xmalloc
36 #define obstack_chunk_free free
37
38 extern void sorry ();
39
40 extern int inhibit_warnings;
41 extern int flag_assume_nonnull_objects;
42 extern tree ctor_label, dtor_label;
43
44 /* From typeck.c: */
45 extern tree unary_complex_lvalue ();
46
47 /* Compute the ease with which a conversion can be performed
48 between an expected type and the given type. */
49 static struct harshness_code convert_harshness ();
50
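/* Each of these macros sets the `code' field of the harshness_code ARG and
then evaluates to ARG itself, so a caller can write `return EVIL_RETURN (h);'
and the like. */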
51 #define EVIL_RETURN(ARG) ((ARG).code = EVIL_CODE, (ARG))
52 #define STD_RETURN(ARG) ((ARG).code = STD_CODE, (ARG))
53 #define QUAL_RETURN(ARG) ((ARG).code = QUAL_CODE, (ARG))
54 #define TRIVIAL_RETURN(ARG) ((ARG).code = TRIVIAL_CODE, (ARG))
55 #define ZERO_RETURN(ARG) ((ARG).code = 0, (ARG))
56
57 /* Ordering function for overload resolution. Compare two candidates
58 by gross quality. */
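/* This is used as a qsort comparison function; the resulting sort leaves the
best (least harsh) candidate last in the array, which is where
ideal_candidate expects to find it. */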
59 int
60 rank_for_overload (x, y)
61 struct candidate *x, *y;
62 {
63 if (y->h.code & (EVIL_CODE|ELLIPSIS_CODE|USER_CODE))
64 return y->h.code - x->h.code;
65 if (x->h.code & (EVIL_CODE|ELLIPSIS_CODE|USER_CODE))
66 return -1;
67
68 /* This is set by compute_conversion_costs, for calling a non-const
69 member function from a const member function. */
70 if ((y->harshness[0].code & CONST_CODE) ^ (x->harshness[0].code & CONST_CODE))
71 return y->harshness[0].code - x->harshness[0].code;
72
73 if (y->h.code & STD_CODE)
74 {
75 if (x->h.code & STD_CODE)
76 return y->h.distance - x->h.distance;
77 return 1;
78 }
79 if (x->h.code & STD_CODE)
80 return -1;
81
82 return y->h.code - x->h.code;
83 }
84
85 /* Compare two candidates, argument by argument. */
86 int
87 rank_for_ideal (x, y)
88 struct candidate *x, *y;
89 {
90 int i;
91
92 if (x->h_len != y->h_len)
93 abort ();
94
95 for (i = 0; i < x->h_len; i++)
96 {
97 if (y->harshness[i].code - x->harshness[i].code)
98 return y->harshness[i].code - x->harshness[i].code;
99 if ((y->harshness[i].code & STD_CODE)
100 && (y->harshness[i].distance - x->harshness[i].distance))
101 return y->harshness[i].distance - x->harshness[i].distance;
102
103 /* They're both the same code. Now see if we're dealing with an
104 integral promotion that needs a finer grain of accuracy. */
105 if (y->harshness[0].code & PROMO_CODE
106 && (y->harshness[i].int_penalty ^ x->harshness[i].int_penalty))
107 return y->harshness[i].int_penalty - x->harshness[i].int_penalty;
108 }
109 return 0;
110 }
111
112 /* TYPE is the type we wish to convert to. PARMTYPE is the type of the
113 actual argument, and PARM is the argument expression itself, or NULL_TREE.
114 We use a somewhat arbitrary cost function to measure this conversion. */
115 static struct harshness_code
116 convert_harshness (type, parmtype, parm)
117 register tree type, parmtype;
118 tree parm;
119 {
120 struct harshness_code h;
121 register enum tree_code codel;
122 register enum tree_code coder;
123 int lvalue;
124
125 h.code = 0;
126 h.distance = 0;
127 h.int_penalty = 0;
128
129 #ifdef GATHER_STATISTICS
130 n_convert_harshness++;
131 #endif
132
133 if (TREE_CODE (parmtype) == REFERENCE_TYPE)
134 {
135 if (parm)
136 parm = convert_from_reference (parm);
137 parmtype = TREE_TYPE (parmtype);
138 lvalue = 1;
139 }
140 else if (parm)
141 lvalue = lvalue_p (parm);
142 else
143 lvalue = 0;
144
145 if (TYPE_PTRMEMFUNC_P (type))
146 type = TYPE_PTRMEMFUNC_FN_TYPE (type);
147 if (TYPE_PTRMEMFUNC_P (parmtype))
148 parmtype = TYPE_PTRMEMFUNC_FN_TYPE (parmtype);
149
150 codel = TREE_CODE (type);
151 coder = TREE_CODE (parmtype);
152
153 if (TYPE_MAIN_VARIANT (parmtype) == TYPE_MAIN_VARIANT (type))
154 return ZERO_RETURN (h);
155
156 if (coder == ERROR_MARK)
157 return EVIL_RETURN (h);
158
159 if (codel == REFERENCE_TYPE)
160 {
161 tree ttl, ttr;
162 int constp = parm ? TREE_READONLY (parm) : TYPE_READONLY (parmtype);
163 int volatilep = (parm ? TREE_THIS_VOLATILE (parm)
164 : TYPE_VOLATILE (parmtype));
165 register tree intype = TYPE_MAIN_VARIANT (parmtype);
166 register enum tree_code form = TREE_CODE (intype);
167 int penalty = 0;
168
169 ttl = TREE_TYPE (type);
170
171 /* Only allow const reference binding if we were given a parm to deal
172 with, since it isn't really a conversion. This is a hack to
173 prevent build_type_conversion from finding this conversion, but
174 still allow overloading to find it. */
175 if (! lvalue && ! (parm && TYPE_READONLY (ttl)))
176 return EVIL_RETURN (h);
177
178 if (TYPE_READONLY (ttl) < constp
179 || TYPE_VOLATILE (ttl) < volatilep)
180 return EVIL_RETURN (h);
181
182 /* When passing a non-const argument into a const reference, penalize it a
183 little, so a non-const reference is preferred over this one. */
184 penalty = ((TYPE_READONLY (ttl) > constp)
185 + (TYPE_VOLATILE (ttl) > volatilep));
186
187 ttl = TYPE_MAIN_VARIANT (ttl);
188
189 if (form == OFFSET_TYPE)
190 {
191 intype = TREE_TYPE (intype);
192 form = TREE_CODE (intype);
193 }
194
195 ttr = intype;
196
197 /* Maybe handle conversion to base here? */
198
199 h = convert_harshness (ttl, ttr, NULL_TREE);
200 if (penalty && h.code == 0)
201 {
202 h.code = QUAL_CODE;
203 h.int_penalty = penalty;
204 }
205 return h;
206 }
207
208 if (codel == POINTER_TYPE && fntype_p (parmtype))
209 {
210 tree p1, p2;
211 struct harshness_code h1, h2;
212
213 /* Get to the METHOD_TYPE or FUNCTION_TYPE that this might be. */
214 type = TREE_TYPE (type);
215
216 if (coder == POINTER_TYPE)
217 {
218 parmtype = TREE_TYPE (parmtype);
219 coder = TREE_CODE (parmtype);
220 }
221
222 if (coder != TREE_CODE (type))
223 return EVIL_RETURN (h);
224
225 if (type != parmtype && coder == METHOD_TYPE)
226 {
227 tree ttl = TYPE_METHOD_BASETYPE (type);
228 tree ttr = TYPE_METHOD_BASETYPE (parmtype);
229
230 int b_or_d = get_base_distance (ttr, ttl, 0, 0);
231 if (b_or_d < 0)
232 {
233 b_or_d = get_base_distance (ttl, ttr, 0, 0);
234 if (b_or_d < 0)
235 return EVIL_RETURN (h);
236 h.distance = -b_or_d;
237 }
238 else
239 h.distance = b_or_d;
240 h.code = STD_CODE;
241
242 type = build_function_type
243 (TREE_TYPE (type), TREE_CHAIN (TYPE_ARG_TYPES (type)));
244 parmtype = build_function_type
245 (TREE_TYPE (parmtype), TREE_CHAIN (TYPE_ARG_TYPES (parmtype)));
246 }
247
248 /* We allow the default conversion between function type
249 and pointer-to-function type for free. */
250 if (comptypes (type, parmtype, 1))
251 return h;
252
253 if (pedantic)
254 return EVIL_RETURN (h);
255
256 /* Compare return types. */
257 p1 = TREE_TYPE (type);
258 p2 = TREE_TYPE (parmtype);
259 h2 = convert_harshness (p1, p2, NULL_TREE);
260 if (h2.code & EVIL_CODE)
261 return h2;
262
263 h1.code = TRIVIAL_CODE;
264 h1.distance = 0;
265
266 if (h2.distance != 0)
267 {
268 tree binfo;
269
270 /* This only works for pointers. */
271 if (TREE_CODE (p1) != POINTER_TYPE
272 && TREE_CODE (p1) != REFERENCE_TYPE)
273 return EVIL_RETURN (h);
274
275 p1 = TREE_TYPE (p1);
276 p2 = TREE_TYPE (p2);
277 /* Don't die if we happen to be dealing with void*. */
278 if (!IS_AGGR_TYPE (p1) || !IS_AGGR_TYPE (p2))
279 return EVIL_RETURN (h);
280 if (h2.distance < 0)
281 binfo = get_binfo (p2, p1, 0);
282 else
283 binfo = get_binfo (p1, p2, 0);
284
285 if (! BINFO_OFFSET_ZEROP (binfo))
286 {
287 #if 0
288 static int explained = 0;
289 if (h2.distance < 0)
290 message_2_types (sorry, "cannot cast `%s' to `%s' at function call site", p2, p1);
291 else
292 message_2_types (sorry, "cannot cast `%s' to `%s' at function call site", p1, p2);
293
294 if (! explained++)
295 sorry ("(because pointer values change during conversion)");
296 #endif
297 return EVIL_RETURN (h);
298 }
299 }
300
301 h1.code |= h2.code;
302 if (h2.distance > h1.distance)
303 h1.distance = h2.distance;
304
305 p1 = TYPE_ARG_TYPES (type);
306 p2 = TYPE_ARG_TYPES (parmtype);
307 while (p1 && TREE_VALUE (p1) != void_type_node
308 && p2 && TREE_VALUE (p2) != void_type_node)
309 {
310 h2 = convert_harshness (TREE_VALUE (p1), TREE_VALUE (p2),
311 NULL_TREE);
312 if (h2.code & EVIL_CODE)
313 return h2;
314
315 if (h2.distance)
316 {
317 /* This only works for pointers and references. */
318 if (TREE_CODE (TREE_VALUE (p1)) != POINTER_TYPE
319 && TREE_CODE (TREE_VALUE (p1)) != REFERENCE_TYPE)
320 return EVIL_RETURN (h);
321 h2.distance = - h2.distance;
322 }
323
324 h1.code |= h2.code;
325 if (h2.distance > h1.distance)
326 h1.distance = h2.distance;
327 p1 = TREE_CHAIN (p1);
328 p2 = TREE_CHAIN (p2);
329 }
330 if (p1 == p2)
331 return h1;
332 if (p2)
333 {
334 if (p1)
335 return EVIL_RETURN (h);
336 h1.code |= ELLIPSIS_CODE;
337 return h1;
338 }
339 if (p1)
340 {
341 if (TREE_PURPOSE (p1) == NULL_TREE)
342 h1.code |= EVIL_CODE;
343 return h1;
344 }
345 }
346 else if (codel == POINTER_TYPE && coder == OFFSET_TYPE)
347 {
348 tree ttl, ttr;
349
350 /* Get to the OFFSET_TYPE that this might be. */
351 type = TREE_TYPE (type);
352
353 if (coder != TREE_CODE (type))
354 return EVIL_RETURN (h);
355
356 ttl = TYPE_OFFSET_BASETYPE (type);
357 ttr = TYPE_OFFSET_BASETYPE (parmtype);
358
359 if (ttl == ttr)
360 h.code = 0;
361 else
362 {
363 int b_or_d = get_base_distance (ttr, ttl, 0, 0);
364 if (b_or_d < 0)
365 {
366 b_or_d = get_base_distance (ttl, ttr, 0, 0);
367 if (b_or_d < 0)
368 return EVIL_RETURN (h);
369 h.distance = -b_or_d;
370 }
371 else
372 h.distance = b_or_d;
373 h.code = STD_CODE;
374 }
375
376 /* Now test the OFFSET_TYPE's target compatibility. */
377 type = TREE_TYPE (type);
378 parmtype = TREE_TYPE (parmtype);
379 }
380
381 if (coder == UNKNOWN_TYPE)
382 {
383 if (codel == FUNCTION_TYPE
384 || codel == METHOD_TYPE
385 || (codel == POINTER_TYPE
386 && (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
387 || TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)))
388 return TRIVIAL_RETURN (h);
389 return EVIL_RETURN (h);
390 }
391
392 if (coder == VOID_TYPE)
393 return EVIL_RETURN (h);
394
395 if (codel == BOOLEAN_TYPE)
396 {
397 if (INTEGRAL_CODE_P (coder) || coder == REAL_TYPE)
398 return STD_RETURN (h);
399 else if (coder == POINTER_TYPE || coder == OFFSET_TYPE)
400 {
401 /* Make this worse than any conversion to another pointer.
402 FIXME this is how I think the language should work, but it may not
403 end up being how the language is standardized (jason 1/30/95). */
404 h.distance = 32767;
405 return STD_RETURN (h);
406 }
407 return EVIL_RETURN (h);
408 }
409
410 if (INTEGRAL_CODE_P (codel))
411 {
412 /* Control equivalence of ints and enums. */
413
414 if (codel == ENUMERAL_TYPE
415 && flag_int_enum_equivalence == 0)
416 {
417 /* Enums can be converted to ints, but not vice-versa. */
418 if (coder != ENUMERAL_TYPE
419 || TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (parmtype))
420 return EVIL_RETURN (h);
421 }
422
423 /* else enums and ints (almost) freely interconvert. */
424
425 if (INTEGRAL_CODE_P (coder))
426 {
427 if (TYPE_MAIN_VARIANT (type)
428 == TYPE_MAIN_VARIANT (type_promotes_to (parmtype)))
429 {
430 h.code = PROMO_CODE;
431 #if 0 /* What purpose does this serve? -jason */
432 /* A char, short, wchar_t, etc., should promote to an int if
433 it can handle it, otherwise to an unsigned. So we'll make
434 an unsigned. */
435 if (type != integer_type_node)
436 h.int_penalty = 1;
437 #endif
438 }
439 else
440 h.code = STD_CODE;
441
442 return h;
443 }
444 else if (coder == REAL_TYPE)
445 {
446 h.code = STD_CODE;
447 h.distance = 0;
448 return h;
449 }
450 }
451
452 if (codel == REAL_TYPE)
453 {
454 if (coder == REAL_TYPE)
455 {
456 if (TYPE_MAIN_VARIANT (type)
457 == TYPE_MAIN_VARIANT (type_promotes_to (parmtype)))
458 h.code = PROMO_CODE;
459 else
460 h.code = STD_CODE;
461
462 return h;
463 }
464 else if (INTEGRAL_CODE_P (coder))
465 {
466 h.code = STD_CODE;
467 h.distance = 0;
468 return h;
469 }
470 }
471
472 /* Convert arrays which have not previously been converted. */
473 #if 0
474 if (codel == ARRAY_TYPE)
475 codel = POINTER_TYPE;
476 #endif
477 if (coder == ARRAY_TYPE)
478 {
479 coder = POINTER_TYPE;
480 if (parm)
481 {
482 parm = decay_conversion (parm);
483 parmtype = TREE_TYPE (parm);
484 }
485 else
486 parmtype = build_pointer_type (TREE_TYPE (parmtype));
487 }
488
489 /* Conversions among pointers */
490 if (codel == POINTER_TYPE && coder == POINTER_TYPE)
491 {
492 register tree ttl = TYPE_MAIN_VARIANT (TREE_TYPE (type));
493 register tree ttr = TYPE_MAIN_VARIANT (TREE_TYPE (parmtype));
494 int penalty = 4 * (ttl != ttr);
495
496 /* Anything converts to void *. Since this may be `const void *'
497 (etc.) use VOID_TYPE instead of void_type_node. Otherwise, the
498 targets must be the same, except that we do allow (at some cost)
499 conversion between signed and unsigned pointer types. */
500
501 if ((TREE_CODE (ttl) == METHOD_TYPE
502 || TREE_CODE (ttl) == FUNCTION_TYPE)
503 && TREE_CODE (ttl) == TREE_CODE (ttr))
504 {
505 if (comptypes (ttl, ttr, -1))
506 {
507 h.code = penalty ? STD_CODE : 0;
508 h.distance = 0;
509 }
510 else
511 h.code = EVIL_CODE;
512 return h;
513 }
514
515 #if 1
516 if (TREE_CODE (ttl) != VOID_TYPE
517 && (TREE_CODE (ttr) != VOID_TYPE || !parm || !integer_zerop (parm)))
518 {
519 if (TREE_UNSIGNED (ttl) != TREE_UNSIGNED (ttr))
520 {
521 ttl = unsigned_type (ttl);
522 ttr = unsigned_type (ttr);
523 penalty = 10;
524 }
525 if (comp_target_types (type, parmtype, 1) <= 0)
526 return EVIL_RETURN (h);
527 }
528 #else
529 if (!(TREE_CODE (ttl) == VOID_TYPE
530 || TREE_CODE (ttr) == VOID_TYPE
531 || (TREE_UNSIGNED (ttl) ^ TREE_UNSIGNED (ttr)
532 && (ttl = unsigned_type (ttl),
533 ttr = unsigned_type (ttr),
534 penalty = 10, 0))
535 || (comp_target_types (ttl, ttr, 0) > 0)))
536 return EVIL_RETURN (h);
537 #endif
538
539 if (penalty == 10 || ttr == ttl)
540 {
541 tree tmp1 = TREE_TYPE (type), tmp2 = TREE_TYPE (parmtype);
542
543 /* If one was unsigned but the other wasn't, then we need to
544 do a standard conversion from T to unsigned T. */
545 if (penalty == 10)
546 h.code = PROMO_CODE; /* was STD_CODE */
547 else
548 h.code = 0;
549
550 /* Note conversion from `T*' to `const T*',
551 or `T*' to `volatile T*'. */
552 if (ttl == ttr
553 && ((TYPE_READONLY (tmp1) != TREE_READONLY (tmp2))
554 || (TYPE_VOLATILE (tmp1) != TYPE_VOLATILE (tmp2))))
555 h.code |= QUAL_CODE;
556
557 h.distance = 0;
558 return h;
559 }
560
561
562 if (TREE_CODE (ttl) == RECORD_TYPE && TREE_CODE (ttr) == RECORD_TYPE)
563 {
564 int b_or_d = get_base_distance (ttl, ttr, 0, 0);
565 if (b_or_d < 0)
566 {
567 b_or_d = get_base_distance (ttr, ttl, 0, 0);
568 if (b_or_d < 0)
569 return EVIL_RETURN (h);
570 h.distance = -b_or_d;
571 }
572 else
573 h.distance = b_or_d;
574 h.code = STD_CODE;
575 return h;
576 }
577
578 /* If converting from a `class*' to a `void*', make it
579 less favorable than any inheritance relationship. */
580 if (TREE_CODE (ttl) == VOID_TYPE && IS_AGGR_TYPE (ttr))
581 {
582 h.code = STD_CODE;
583 h.distance = CLASSTYPE_MAX_DEPTH (ttr)+1;
584 return h;
585 }
586
587 h.code = penalty ? STD_CODE : PROMO_CODE;
588 /* Catch things like `const char *' -> `const void *'
589 vs `const char *' -> `void *'. */
590 if (ttl != ttr)
591 {
592 tree tmp1 = TREE_TYPE (type), tmp2 = TREE_TYPE (parmtype);
593 if ((TYPE_READONLY (tmp1) != TREE_READONLY (tmp2))
594 || (TYPE_VOLATILE (tmp1) != TYPE_VOLATILE (tmp2)))
595 h.code |= QUAL_CODE;
596 }
597 return h;
598 }
599
600 if (codel == POINTER_TYPE && coder == INTEGER_TYPE)
601 {
602 /* This is not a bad match, but don't let it beat
603 integer-enum combinations. */
604 if (parm && integer_zerop (parm))
605 {
606 h.code = STD_CODE;
607 h.distance = 0;
608 return h;
609 }
610 }
611
612 /* C++: Since the `this' parameter of a signature member function
613 is represented as a signature pointer to handle default implementations
614 correctly, we can have the case that `type' is a signature pointer
615 while `parmtype' is a pointer to a signature table. We don't really
616 do any conversions in this case, so just return 0. */
617
618 if (codel == RECORD_TYPE && coder == POINTER_TYPE
619 && IS_SIGNATURE_POINTER (type) && IS_SIGNATURE (TREE_TYPE (parmtype)))
620 return ZERO_RETURN (h);
621
622 if (codel == RECORD_TYPE && coder == RECORD_TYPE)
623 {
624 int b_or_d = get_base_distance (type, parmtype, 0, 0);
625 if (b_or_d < 0)
626 {
627 b_or_d = get_base_distance (parmtype, type, 0, 0);
628 if (b_or_d < 0)
629 return EVIL_RETURN (h);
630 h.distance = -b_or_d;
631 }
632 else
633 h.distance = b_or_d;
634 h.code = STD_CODE;
635 return h;
636 }
637 return EVIL_RETURN (h);
638 }
639
640 /* A clone of build_type_conversion for checking user-defined conversions in
641 overload resolution. */
642
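/* Returns 0 if PARMTYPE already has a conversion function to TYPE itself,
the harshness code of the single usable conversion otherwise, EVIL_CODE if
more than one conversion could be used, and -1 if none applies. */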
643 int
644 user_harshness (type, parmtype, parm)
645 register tree type, parmtype;
646 tree parm;
647 {
648 tree conv;
649 tree winner = NULL_TREE;
650 int code;
651
652 {
653 tree typename = build_typename_overload (type);
654 if (lookup_fnfields (TYPE_BINFO (parmtype), typename, 0))
655 return 0;
656 }
657
658 for (conv = lookup_conversions (parmtype); conv; conv = TREE_CHAIN (conv))
659 {
660 struct harshness_code tmp;
661
662 if (winner && TREE_PURPOSE (winner) == TREE_PURPOSE (conv))
663 continue;
664
665 if (tmp = convert_harshness (type, TREE_VALUE (conv), NULL_TREE),
666 tmp.code < USER_CODE && tmp.distance >= 0)
667 {
668 if (winner)
669 return EVIL_CODE;
670 else
671 {
672 winner = conv;
673 code = tmp.code;
674 }
675 }
676 }
677
678 if (winner)
679 return code;
680
681 return -1;
682 }
683
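/* Return nonzero if a value of type FROM can be converted to type TO without
a user-defined conversion and without a base-to-derived conversion. */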
684 int
685 can_convert (to, from)
686 tree to, from;
687 {
688 struct harshness_code h;
689 h = convert_harshness (to, from, NULL_TREE);
690 return h.code < USER_CODE && h.distance >= 0;
691 }
692
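/* Like can_convert, but also passes the argument expression ARG to
convert_harshness, so properties of the actual argument (such as being an
lvalue or a null pointer constant) can be taken into account. */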
693 int
694 can_convert_arg (to, from, arg)
695 tree to, from, arg;
696 {
697 struct harshness_code h;
698 h = convert_harshness (to, from, arg);
699 return h.code < USER_CODE && h.distance >= 0;
700 }
701
702 #ifdef DEBUG_MATCHING
703 static char *
704 print_harshness (h)
705 struct harshness_code *h;
706 {
707 static char buf[1024];
708 char tmp[1024];
709
710 bzero (buf, 1024 * sizeof (char));
711 strcat (buf, "codes=[");
712 if (h->code & EVIL_CODE)
713 strcat (buf, "EVIL");
714 if (h->code & CONST_CODE)
715 strcat (buf, " CONST");
716 if (h->code & ELLIPSIS_CODE)
717 strcat (buf, " ELLIPSIS");
718 if (h->code & USER_CODE)
719 strcat (buf, " USER");
720 if (h->code & STD_CODE)
721 strcat (buf, " STD");
722 if (h->code & PROMO_CODE)
723 strcat (buf, " PROMO");
724 if (h->code & QUAL_CODE)
725 strcat (buf, " QUAL");
726 if (h->code & TRIVIAL_CODE)
727 strcat (buf, " TRIVIAL");
728 if (buf[0] == '\0')
729 strcat (buf, "0");
730
731 sprintf (tmp, "] distance=%d int_penalty=%d", h->distance, h->int_penalty);
732
733 strcat (buf, tmp);
734
735 return buf;
736 }
737 #endif
738
739 /* Algorithm: For each argument, calculate how difficult it is to
740 make FUNCTION accept that argument. If we can easily tell that
741 FUNCTION won't be acceptable to one of the arguments, then we
742 don't need to compute the ease of converting the other arguments,
743 since it will never show up in the intersection of all arguments'
744 favorite functions.
745
746 Conversions between builtin and user-defined types are allowed, but
747 no function involving such a conversion is preferred to one which
748 does not require such a conversion. Furthermore, such conversions
749 must be unique. */
750
751 void
752 compute_conversion_costs (function, tta_in, cp, arglen)
753 tree function;
754 tree tta_in;
755 struct candidate *cp;
756 int arglen;
757 {
758 tree ttf_in = TYPE_ARG_TYPES (TREE_TYPE (function));
759 tree ttf = ttf_in;
760 tree tta = tta_in;
761
762 /* Start out with no strikes against. */
763 int evil_strikes = 0;
764 int ellipsis_strikes = 0;
765 int user_strikes = 0;
766 int b_or_d_strikes = 0;
767 int easy_strikes = 0;
768
769 int strike_index = 0, win;
770 struct harshness_code lose;
771 extern int cp_silent;
772
773 #ifdef GATHER_STATISTICS
774 n_compute_conversion_costs++;
775 #endif
776
777 #ifndef DEBUG_MATCHING
778 /* We don't emit any warnings or errors while trying out each candidate. */
779 cp_silent = 1;
780 #endif
781
782 cp->function = function;
783 cp->arg = tta ? TREE_VALUE (tta) : NULL_TREE;
784 cp->u.bad_arg = 0; /* optimistic! */
785
786 cp->h.code = 0;
787 cp->h.distance = 0;
788 cp->h.int_penalty = 0;
789 bzero ((char *) cp->harshness,
790 (cp->h_len + 1) * sizeof (struct harshness_code));
791
792 while (ttf && tta)
793 {
794 struct harshness_code h;
795
796 if (ttf == void_list_node)
797 break;
798
799 if (type_unknown_p (TREE_VALUE (tta)))
800 {
801 /* Must perform some instantiation here. */
802 tree rhs = TREE_VALUE (tta);
803 tree lhstype = TREE_VALUE (ttf);
804
805 /* Keep quiet about possible contravariance violations. */
806 int old_inhibit_warnings = inhibit_warnings;
807 inhibit_warnings = 1;
808
809 /* @@ This is to undo what `grokdeclarator' does to
810 parameter types. It really should go through
811 something more general. */
812
813 TREE_TYPE (tta) = unknown_type_node;
814 rhs = instantiate_type (lhstype, rhs, 0);
815 inhibit_warnings = old_inhibit_warnings;
816
817 if (TREE_CODE (rhs) == ERROR_MARK)
818 h.code = EVIL_CODE;
819 else
820 h = convert_harshness (lhstype, TREE_TYPE (rhs), rhs);
821 }
822 else
823 {
824 #ifdef DEBUG_MATCHING
825 static tree old_function = NULL_TREE;
826
827 if (!old_function || function != old_function)
828 {
829 cp_error ("trying %D", function);
830 old_function = function;
831 }
832
833 cp_error (" doing (%T) %E against arg %T",
834 TREE_TYPE (TREE_VALUE (tta)), TREE_VALUE (tta),
835 TREE_VALUE (ttf));
836 #endif
837
838 h = convert_harshness (TREE_VALUE (ttf),
839 TREE_TYPE (TREE_VALUE (tta)),
840 TREE_VALUE (tta));
841
842 #ifdef DEBUG_MATCHING
843 cp_error (" evaluated %s", print_harshness (&h));
844 #endif
845 }
846
847 cp->harshness[strike_index] = h;
848 if ((h.code & EVIL_CODE)
849 || ((h.code & STD_CODE) && h.distance < 0))
850 {
851 cp->u.bad_arg = strike_index;
852 evil_strikes = 1;
853 }
854 else if (h.code & ELLIPSIS_CODE)
855 ellipsis_strikes += 1;
856 #if 0
857 /* This is never set by `convert_harshness'. */
858 else if (h.code & USER_CODE)
859 {
860 user_strikes += 1;
861 }
862 #endif
863 else
864 {
865 if ((h.code & STD_CODE) && h.distance)
866 {
867 if (h.distance > b_or_d_strikes)
868 b_or_d_strikes = h.distance;
869 }
870 else
871 easy_strikes += (h.code & (STD_CODE|PROMO_CODE|TRIVIAL_CODE));
872 cp->h.code |= h.code;
873 /* Make sure we communicate this. */
874 cp->h.int_penalty += h.int_penalty;
875 }
876
877 ttf = TREE_CHAIN (ttf);
878 tta = TREE_CHAIN (tta);
879 strike_index += 1;
880 }
881
882 if (tta)
883 {
884 /* ran out of formals, and parmlist is fixed size. */
885 if (ttf /* == void_type_node */)
886 {
887 cp->h.code = EVIL_CODE;
888 cp->u.bad_arg = -1;
889 cp_silent = 0;
890 return;
891 }
892 else
893 {
894 struct harshness_code h;
895 int l = list_length (tta);
896 ellipsis_strikes += l;
897 h.code = ELLIPSIS_CODE;
898 h.distance = 0;
899 h.int_penalty = 0;
900 for (; l; --l)
901 cp->harshness[strike_index++] = h;
902 }
903 }
904 else if (ttf && ttf != void_list_node)
905 {
906 /* ran out of actuals, and no defaults. */
907 if (TREE_PURPOSE (ttf) == NULL_TREE)
908 {
909 cp->h.code = EVIL_CODE;
910 cp->u.bad_arg = -2;
911 cp_silent = 0;
912 return;
913 }
914 /* Store index of first default. */
915 cp->harshness[arglen].distance = strike_index+1;
916 }
917 else
918 cp->harshness[arglen].distance = 0;
919
920 /* Argument list lengths work out, so we don't need to check them again. */
921 if (evil_strikes)
922 {
923 /* We do not check for derived->base conversions here, since in
924 no case would they give evil strike counts, unless such conversions
925 are somehow ambiguous. */
926
927 /* See if any user-defined conversions apply.
928 But make sure that we do not loop. */
929 static int dont_convert_types = 0;
930
931 if (dont_convert_types)
932 {
933 cp->h.code = EVIL_CODE;
934 cp_silent = 0;
935 return;
936 }
937
938 win = 0; /* Only get one chance to win. */
939 ttf = TYPE_ARG_TYPES (TREE_TYPE (function));
940 tta = tta_in;
941 strike_index = 0;
942 evil_strikes = 0;
943
944 while (ttf && tta)
945 {
946 if (ttf == void_list_node)
947 break;
948
949 lose = cp->harshness[strike_index];
950 if ((lose.code & EVIL_CODE)
951 || ((lose.code & STD_CODE) && lose.distance < 0))
952 {
953 tree actual_type = TREE_TYPE (TREE_VALUE (tta));
954 tree formal_type = TREE_VALUE (ttf);
955 int extra_conversions = 0;
956
957 dont_convert_types = 1;
958
959 if (TREE_CODE (formal_type) == REFERENCE_TYPE)
960 formal_type = TREE_TYPE (formal_type);
961 if (TREE_CODE (actual_type) == REFERENCE_TYPE)
962 actual_type = TREE_TYPE (actual_type);
963
964 if (formal_type != error_mark_node
965 && actual_type != error_mark_node)
966 {
967 formal_type = TYPE_MAIN_VARIANT (formal_type);
968 actual_type = TYPE_MAIN_VARIANT (actual_type);
969
970 if (TYPE_HAS_CONSTRUCTOR (formal_type))
971 {
972 /* If it has a constructor for this type,
973 try to use it. */
974 /* @@ There is no way to save this result yet, so
975 success is a NULL_TREE for now. */
976 if (convert_to_aggr (formal_type, TREE_VALUE (tta), 0, 1)
977 != error_mark_node)
978 win++;
979 }
980 if (TYPE_LANG_SPECIFIC (actual_type)
981 && TYPE_HAS_CONVERSION (actual_type))
982 {
983 int extra = user_harshness (formal_type, actual_type);
984
985 if (extra == EVIL_CODE)
986 win += 2;
987 else if (extra >= 0)
988 {
989 win++;
990 extra_conversions = extra;
991 }
992 }
993 }
994 dont_convert_types = 0;
995
996 if (win == 1)
997 {
998 user_strikes += 1;
999 cp->harshness[strike_index].code
1000 = USER_CODE | (extra_conversions ? STD_CODE : 0);
1001 win = 0;
1002 }
1003 else
1004 {
1005 if (cp->u.bad_arg > strike_index)
1006 cp->u.bad_arg = strike_index;
1007
1008 evil_strikes = win ? 2 : 1;
1009 break;
1010 }
1011 }
1012
1013 ttf = TREE_CHAIN (ttf);
1014 tta = TREE_CHAIN (tta);
1015 strike_index += 1;
1016 }
1017 }
1018
1019 /* Const member functions get a small penalty because defaulting
1020 to const is less useful than defaulting to non-const. */
1021 /* This is bogus; it does not correspond to anything in the ARM.
1022 This code will be fixed when this entire section is rewritten
1023 to conform to the ARM. (mrs) */
1024 if (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE)
1025 {
1026 tree this_parm = TREE_VALUE (ttf_in);
1027
1028 if (TREE_CODE (this_parm) == RECORD_TYPE /* Is `this' a sig ptr? */
1029 ? TYPE_READONLY (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (this_parm))))
1030 : TYPE_READONLY (TREE_TYPE (this_parm)))
1031 {
1032 cp->harshness[0].code |= TRIVIAL_CODE;
1033 ++easy_strikes;
1034 }
1035 else
1036 {
1037 /* Calling a non-const member function from a const member function
1038 is probably invalid, but for now we let it only draw a warning.
1039 We indicate that such a mismatch has occurred by setting the
1040 harshness to a maximum value. */
1041 if (TREE_CODE (TREE_TYPE (TREE_VALUE (tta_in))) == POINTER_TYPE
1042 && (TYPE_READONLY (TREE_TYPE (TREE_TYPE (TREE_VALUE (tta_in))))))
1043 cp->harshness[0].code |= CONST_CODE;
1044 }
1045 }
1046
1047 if (evil_strikes)
1048 cp->h.code = EVIL_CODE;
1049 if (ellipsis_strikes)
1050 cp->h.code |= ELLIPSIS_CODE;
1051 if (user_strikes)
1052 cp->h.code |= USER_CODE;
1053 cp_silent = 0;
1054 #ifdef DEBUG_MATCHING
1055 cp_error ("final eval %s", print_harshness (&cp->h));
1056 #endif
1057 }
1058
1059 /* Subroutine of ideal_candidate. See if X or Y is a better match
1060 than the other. */
1061 static int
1062 strictly_better (x, y)
1063 unsigned short x, y;
1064 {
1065 unsigned short xor;
1066
1067 if (x == y)
1068 return 0;
1069
1070 xor = x ^ y;
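/* XOR's highest set bit is the highest bit in which X and Y differ. The test
below succeeds exactly when neither code has a bit set above it, i.e. when one
candidate's worst kind of conversion is strictly worse than the other's. */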
1071 if (xor >= x || xor >= y)
1072 return 1;
1073 return 0;
1074 }
1075
1076 /* When one of several possible overloaded functions and/or methods
1077 can be called, choose the best candidate for overloading.
1078
1079 BASETYPE is the context from which we start method resolution
1080 or NULL if we are comparing overloaded functions.
1081 CANDIDATES is the array of candidates we have to choose from.
1082 N_CANDIDATES is the length of CANDIDATES.
1083 PARMS is a TREE_LIST of parameters to the function we'll ultimately
1084 choose. It is modified in place when resolving methods. It is not
1085 modified in place when resolving overloaded functions.
1086 LEN is the length of the parameter list. */
1087
1088 static struct candidate *
1089 ideal_candidate (basetype, candidates, n_candidates, parms, len)
1090 tree basetype;
1091 struct candidate *candidates;
1092 int n_candidates;
1093 tree parms;
1094 int len;
1095 {
1096 struct candidate *cp = candidates+n_candidates;
1097 int i, j = -1, best_code;
1098
1099 /* For each argument, sort the functions from best to worst for the arg.
1100 For each function that's not best for this arg, set its overall
1101 harshness to EVIL so that other args won't like it. The candidate
1102 list for the last argument is the intersection of all the best-liked
1103 functions. */
1104
1105 #if 0
1106 for (i = 0; i < len; i++)
1107 {
1108 qsort (candidates, n_candidates, sizeof (struct candidate),
1109 rank_for_overload);
1110 best_code = cp[-1].h.code;
1111
1112 /* To find out functions that are worse than that represented
1113 by BEST_CODE, we can't just do a comparison like h.code>best_code.
1114 The total harshness for the "best" fn may be 8|8 for two args, and
1115 the harshness for the next-best may be 8|2. If we just compared,
1116 that would be checking 8>10, which would lead to the next-best
1117 being disqualified. What we actually want to do is get rid
1118 of functions that are definitely worse than that represented
1119 by best_code, i.e. those which have bits set higher than the
1120 highest in best_code. Sooooo, what we do is clear out everything
1121 represented by best_code, and see if we still come up with something
1122 higher. If so (e.g., 8|8 vs 8|16), it'll disqualify it properly. */
1123 for (j = n_candidates-2; j >= 0; j--)
1124 if ((candidates[j].h.code & ~best_code) > best_code)
1125 candidates[j].h.code = EVIL_CODE;
1126 }
1127
1128 if (cp[-1].h.code & EVIL_CODE)
1129 return NULL;
1130 #else
1131 qsort (candidates, n_candidates, sizeof (struct candidate),
1132 rank_for_overload);
1133 best_code = cp[-1].h.code;
1134 #endif
1135
1136 /* If they're at least as good as each other, do an arg-by-arg check. */
1137 if (! strictly_better (cp[-1].h.code, cp[-2].h.code))
1138 {
1139 int better = 0;
1140 int worse = 0;
1141
1142 for (j = 0; j < n_candidates; j++)
1143 if (! strictly_better (candidates[j].h.code, best_code))
1144 break;
1145
1146 qsort (candidates+j, n_candidates-j, sizeof (struct candidate),
1147 rank_for_ideal);
1148 for (i = 0; i < len; i++)
1149 {
1150 if (cp[-1].harshness[i].code < cp[-2].harshness[i].code)
1151 better = 1;
1152 else if (cp[-1].harshness[i].code > cp[-2].harshness[i].code)
1153 worse = 1;
1154 else if (cp[-1].harshness[i].code & STD_CODE)
1155 {
1156 /* If it involves a standard conversion, let the
1157 inheritance lattice be the final arbiter. */
1158 if (cp[-1].harshness[i].distance > cp[-2].harshness[i].distance)
1159 worse = 1;
1160 else if (cp[-1].harshness[i].distance < cp[-2].harshness[i].distance)
1161 better = 1;
1162 }
1163 else if (cp[-1].harshness[i].code & PROMO_CODE)
1164 {
1165 /* For integral promotions, take into account a finer
1166 granularity for determining which types should be favored
1167 over others in such promotions. */
1168 if (cp[-1].harshness[i].int_penalty > cp[-2].harshness[i].int_penalty)
1169 worse = 1;
1170 else if (cp[-1].harshness[i].int_penalty < cp[-2].harshness[i].int_penalty)
1171 better = 1;
1172 }
1173 }
1174
1175 if (! better || worse)
1176 return NULL;
1177 }
1178 return cp-1;
1179 }
1180
1181 /* Assume that if the class referred to is not in the
1182 current class hierarchy, it may be remote.
1183 PARENT is assumed to be of aggregate type here. */
1184 static int
1185 may_be_remote (parent)
1186 tree parent;
1187 {
1188 if (TYPE_OVERLOADS_METHOD_CALL_EXPR (parent) == 0)
1189 return 0;
1190
1191 if (current_class_type == NULL_TREE)
1192 return 0;
1193
1194 if (parent == current_class_type)
1195 return 0;
1196
1197 if (UNIQUELY_DERIVED_FROM_P (parent, current_class_type))
1198 return 0;
1199 return 1;
1200 }
1201
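/* Build a reference to the virtual function table pointer field within
DATUM, an object of aggregate type TYPE. */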
1202 tree
1203 build_vfield_ref (datum, type)
1204 tree datum, type;
1205 {
1206 tree rval;
1207 int old_assume_nonnull_objects = flag_assume_nonnull_objects;
1208
1209 if (datum == error_mark_node)
1210 return error_mark_node;
1211
1212 /* Vtable references are always made from non-null objects. */
1213 flag_assume_nonnull_objects = 1;
1214 if (TREE_CODE (TREE_TYPE (datum)) == REFERENCE_TYPE)
1215 datum = convert_from_reference (datum);
1216
1217 if (! TYPE_USES_COMPLEX_INHERITANCE (type))
1218 rval = build (COMPONENT_REF, TREE_TYPE (CLASSTYPE_VFIELD (type)),
1219 datum, CLASSTYPE_VFIELD (type));
1220 else
1221 rval = build_component_ref (datum, DECL_NAME (CLASSTYPE_VFIELD (type)), 0, 0);
1222 flag_assume_nonnull_objects = old_assume_nonnull_objects;
1223
1224 return rval;
1225 }
1226
1227 /* Build a call to a member of an object. I.e., one that overloads
1228 operator ()(), or is a pointer-to-function or pointer-to-method. */
1229 static tree
1230 build_field_call (basetype_path, instance_ptr, name, parms)
1231 tree basetype_path, instance_ptr, name, parms;
1232 {
1233 tree field, instance;
1234
1235 if (instance_ptr == current_class_decl)
1236 {
1237 /* Check to see if we really have a reference to an instance variable
1238 with `operator()()' overloaded. */
1239 field = IDENTIFIER_CLASS_VALUE (name);
1240
1241 if (field == NULL_TREE)
1242 {
1243 cp_error ("`this' has no member named `%D'", name);
1244 return error_mark_node;
1245 }
1246
1247 if (TREE_CODE (field) == FIELD_DECL)
1248 {
1249 /* If it's a field, try overloading operator (),
1250 or calling if the field is a pointer-to-function. */
1251 instance = build_component_ref_1 (C_C_D, field, 0);
1252 if (instance == error_mark_node)
1253 return error_mark_node;
1254
1255 if (TYPE_LANG_SPECIFIC (TREE_TYPE (instance))
1256 && TYPE_OVERLOADS_CALL_EXPR (TREE_TYPE (instance)))
1257 return build_opfncall (CALL_EXPR, LOOKUP_NORMAL, instance, parms, NULL_TREE);
1258
1259 if (TREE_CODE (TREE_TYPE (instance)) == POINTER_TYPE)
1260 {
1261 if (TREE_CODE (TREE_TYPE (TREE_TYPE (instance))) == FUNCTION_TYPE)
1262 return build_function_call (instance, parms);
1263 else if (TREE_CODE (TREE_TYPE (TREE_TYPE (instance))) == METHOD_TYPE)
1264 return build_function_call (instance, tree_cons (NULL_TREE, current_class_decl, parms));
1265 }
1266 }
1267 return NULL_TREE;
1268 }
1269
1270 /* Check to see if this is really a reference to an instance variable
1271 with `operator()()' overloaded. */
1272 field = lookup_field (basetype_path, name, 1, 0);
1273
1274 /* This can happen if the reference was ambiguous or for access
1275 violations. */
1276 if (field == error_mark_node)
1277 return error_mark_node;
1278
1279 if (field)
1280 {
1281 tree basetype;
1282 tree ftype = TREE_TYPE (field);
1283
1284 if (TREE_CODE (ftype) == REFERENCE_TYPE)
1285 ftype = TREE_TYPE (ftype);
1286
1287 if (TYPE_LANG_SPECIFIC (ftype) && TYPE_OVERLOADS_CALL_EXPR (ftype))
1288 {
1289 /* Make the next search for this field very short. */
1290 basetype = DECL_FIELD_CONTEXT (field);
1291 instance_ptr = convert_pointer_to (basetype, instance_ptr);
1292
1293 instance = build_indirect_ref (instance_ptr, NULL_PTR);
1294 return build_opfncall (CALL_EXPR, LOOKUP_NORMAL,
1295 build_component_ref_1 (instance, field, 0),
1296 parms, NULL_TREE);
1297 }
1298 if (TREE_CODE (ftype) == POINTER_TYPE)
1299 {
1300 if (TREE_CODE (TREE_TYPE (ftype)) == FUNCTION_TYPE
1301 || TREE_CODE (TREE_TYPE (ftype)) == METHOD_TYPE)
1302 {
1303 /* This is a member which is a pointer to function. */
1304 tree ref
1305 = build_component_ref_1 (build_indirect_ref (instance_ptr,
1306 NULL_PTR),
1307 field, LOOKUP_COMPLAIN);
1308 if (ref == error_mark_node)
1309 return error_mark_node;
1310 return build_function_call (ref, parms);
1311 }
1312 }
1313 else if (TREE_CODE (ftype) == METHOD_TYPE)
1314 {
1315 error ("invalid call via pointer-to-member function");
1316 return error_mark_node;
1317 }
1318 else
1319 return NULL_TREE;
1320 }
1321 return NULL_TREE;
1322 }
1323
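/* Look for a type named INNER_NAME nested directly within TYPE, first among
TYPE's tags and then among its TYPE_DECL fields. If INNER_TYPES is non-null,
it is a chain of further nested names to resolve within the type found;
otherwise the found type's DECL_NESTED_TYPENAME is returned. Returns
NULL_TREE if nothing is found. */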
1324 tree
1325 find_scoped_type (type, inner_name, inner_types)
1326 tree type, inner_name, inner_types;
1327 {
1328 tree tags = CLASSTYPE_TAGS (type);
1329
1330 while (tags)
1331 {
1332 /* The TREE_PURPOSE of an enum tag (which becomes a member of the
1333 enclosing class) is set to the name for the enum type. So, if
1334 inner_name is `bar', and we strike `baz' for `enum bar { baz }',
1335 then this test will be true. */
1336 if (TREE_PURPOSE (tags) == inner_name)
1337 {
1338 if (inner_types == NULL_TREE)
1339 return DECL_NESTED_TYPENAME (TYPE_NAME (TREE_VALUE (tags)));
1340 return resolve_scope_to_name (TREE_VALUE (tags), inner_types);
1341 }
1342 tags = TREE_CHAIN (tags);
1343 }
1344
1345 #if 0
1346 /* XXX This needs to be fixed better. */
1347 if (TREE_CODE (type) == UNINSTANTIATED_P_TYPE)
1348 {
1349 sorry ("nested class lookup in template type");
1350 return NULL_TREE;
1351 }
1352 #endif
1353
1354 /* Look for a TYPE_DECL. */
1355 for (tags = TYPE_FIELDS (type); tags; tags = TREE_CHAIN (tags))
1356 if (TREE_CODE (tags) == TYPE_DECL && DECL_NAME (tags) == inner_name)
1357 {
1358 /* Code by raeburn. */
1359 if (inner_types == NULL_TREE)
1360 return DECL_NESTED_TYPENAME (tags);
1361 return resolve_scope_to_name (TREE_TYPE (tags), inner_types);
1362 }
1363
1364 return NULL_TREE;
1365 }
1366
1367 /* Resolve an expression NAME1::NAME2::...::NAMEn to
1368 the name that names the above nested type. INNER_STUFF
1369 is a chain of nested type names (held together by SCOPE_REFs);
1370 OUTER_TYPE is the type we know to enclose INNER_STUFF.
1371 Returns NULL_TREE if there is an error. */
1372 tree
1373 resolve_scope_to_name (outer_type, inner_stuff)
1374 tree outer_type, inner_stuff;
1375 {
1376 register tree tmp;
1377 tree inner_name, inner_type;
1378
1379 if (outer_type == NULL_TREE && current_class_type != NULL_TREE)
1380 {
1381 /* We first try to look for a nesting in our current class context,
1382 then try any enclosing classes. */
1383 tree type = current_class_type;
1384
1385 while (type && (TREE_CODE (type) == RECORD_TYPE
1386 || TREE_CODE (type) == UNION_TYPE))
1387 {
1388 tree rval = resolve_scope_to_name (type, inner_stuff);
1389
1390 if (rval != NULL_TREE)
1391 return rval;
1392 type = DECL_CONTEXT (TYPE_NAME (type));
1393 }
1394 }
1395
1396 if (TREE_CODE (inner_stuff) == SCOPE_REF)
1397 {
1398 inner_name = TREE_OPERAND (inner_stuff, 0);
1399 inner_type = TREE_OPERAND (inner_stuff, 1);
1400 }
1401 else
1402 {
1403 inner_name = inner_stuff;
1404 inner_type = NULL_TREE;
1405 }
1406
1407 if (outer_type == NULL_TREE)
1408 {
1409 tree x;
1410 /* If we have something that's already a type by itself,
1411 use that. */
1412 if (IDENTIFIER_HAS_TYPE_VALUE (inner_name))
1413 {
1414 if (inner_type)
1415 return resolve_scope_to_name (IDENTIFIER_TYPE_VALUE (inner_name),
1416 inner_type);
1417 return inner_name;
1418 }
1419
1420 x = lookup_name (inner_name, 0);
1421
1422 if (x && TREE_CODE (x) == NAMESPACE_DECL)
1423 {
1424 x = lookup_namespace_name (x, inner_type);
1425 return x;
1426 }
1427 return NULL_TREE;
1428 }
1429
1430 if (! IS_AGGR_TYPE (outer_type))
1431 return NULL_TREE;
1432
1433 /* Look for member classes or enums. */
1434 tmp = find_scoped_type (outer_type, inner_name, inner_type);
1435
1436 /* If it's not a type in this class, then go down into the
1437 base classes and search there. */
1438 if (! tmp && TYPE_BINFO (outer_type))
1439 {
1440 tree binfos = TYPE_BINFO_BASETYPES (outer_type);
1441 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
1442
1443 for (i = 0; i < n_baselinks; i++)
1444 {
1445 tree base_binfo = TREE_VEC_ELT (binfos, i);
1446 tmp = resolve_scope_to_name (BINFO_TYPE (base_binfo), inner_stuff);
1447 if (tmp)
1448 return tmp;
1449 }
1450 tmp = NULL_TREE;
1451 }
1452
1453 return tmp;
1454 }
1455
1456 /* Build a method call of the form `EXP->SCOPES::NAME (PARMS)'.
1457 This is how virtual function calls are avoided. */
1458 tree
1459 build_scoped_method_call (exp, scopes, name, parms)
1460 tree exp, scopes, name, parms;
1461 {
1462 /* Because this syntactic form does not allow
1463 a pointer to a base class to be `stolen',
1464 we need not protect the derived->base conversion
1465 that happens here.
1466
1467 @@ But we do have to check access privileges later. */
1468 tree basename = resolve_scope_to_name (NULL_TREE, scopes);
1469 tree basetype, binfo, decl;
1470 tree type = TREE_TYPE (exp);
1471
1472 if (type == error_mark_node
1473 || basename == NULL_TREE)
1474 return error_mark_node;
1475
1476 basetype = IDENTIFIER_TYPE_VALUE (basename);
1477
1478 if (TREE_CODE (type) == REFERENCE_TYPE)
1479 type = TREE_TYPE (type);
1480
1481 /* Destructors can be "called" for simple types; see 5.2.4 and 12.4. Note
1482 that explicit ~int is caught in the parser; this deals with typedefs
1483 and template parms. */
1484 if (TREE_CODE (name) == BIT_NOT_EXPR && ! is_aggr_typedef (basename, 0))
1485 {
1486 if (type != basetype)
1487 cp_error ("type of `%E' does not match destructor type `%T' (type was `%T')",
1488 exp, basetype, type);
1489 name = TREE_OPERAND (name, 0);
1490 if (basetype != get_type_value (name))
1491 cp_error ("qualified type `%T' does not match destructor name `~%T'",
1492 basetype, name);
1493 return convert (void_type_node, exp);
1494 }
1495
1496 if (! is_aggr_typedef (basename, 1))
1497 return error_mark_node;
1498
1499 if (! IS_AGGR_TYPE (type))
1500 {
1501 cp_error ("base object `%E' of scoped method call is of non-aggregate type `%T'",
1502 exp, type);
1503 return error_mark_node;
1504 }
1505
1506 if ((binfo = binfo_or_else (basetype, type)))
1507 {
1508 if (binfo == error_mark_node)
1509 return error_mark_node;
1510 if (TREE_CODE (exp) == INDIRECT_REF)
1511 decl = build_indirect_ref (convert_pointer_to (binfo,
1512 build_unary_op (ADDR_EXPR, exp, 0)), NULL_PTR);
1513 else
1514 decl = build_scoped_ref (exp, scopes);
1515
1516 /* Call to a destructor. */
1517 if (TREE_CODE (name) == BIT_NOT_EXPR)
1518 {
1519 /* Explicit call to destructor. */
1520 name = TREE_OPERAND (name, 0);
1521 if (! (name == constructor_name (TREE_TYPE (decl))
1522 || TREE_TYPE (decl) == get_type_value (name)))
1523 {
1524 cp_error
1525 ("qualified type `%T' does not match destructor name `~%T'",
1526 TREE_TYPE (decl), name);
1527 return error_mark_node;
1528 }
1529 if (! TYPE_HAS_DESTRUCTOR (TREE_TYPE (decl)))
1530 return convert (void_type_node, exp);
1531
1532 return build_delete (TREE_TYPE (decl), decl, integer_two_node,
1533 LOOKUP_NORMAL|LOOKUP_NONVIRTUAL|LOOKUP_DESTRUCTOR,
1534 0);
1535 }
1536
1537 /* Call to a method. */
1538 return build_method_call (decl, name, parms, binfo,
1539 LOOKUP_NORMAL|LOOKUP_NONVIRTUAL);
1540 }
1541 return error_mark_node;
1542 }
1543
1544 static void
1545 print_candidates (candidates)
1546 tree candidates;
1547 {
1548 cp_error_at ("candidates are: %D", TREE_VALUE (candidates));
1549 candidates = TREE_CHAIN (candidates);
1550
1551 while (candidates)
1552 {
1553 cp_error_at (" %D", TREE_VALUE (candidates));
1554 candidates = TREE_CHAIN (candidates);
1555 }
1556 }
1557
1558 static void
1559 print_n_candidates (candidates, n)
1560 struct candidate *candidates;
1561 int n;
1562 {
1563 int i;
1564
1565 cp_error_at ("candidates are: %D", candidates[0].function);
1566 for (i = 1; i < n; i++)
1567 cp_error_at (" %D", candidates[i].function);
1568 }
1569
1570 /* Build something of the form ptr->method (args)
1571 or object.method (args). This can also build
1572 calls to constructors, and find friends.
1573
1574 Member functions always take their class variable
1575 as a pointer.
1576
1577 INSTANCE is a class instance.
1578
1579 NAME is the name of the method desired, usually an IDENTIFIER_NODE.
1580
1581 PARMS help to figure out what that NAME really refers to.
1582
1583 BASETYPE_PATH, if non-NULL, contains a chain from the type of INSTANCE
1584 down to the real instance type to use for access checking. We need this
1585 information to get protected accesses correct. This parameter is used
1586 by build_member_call.
1587
1588 FLAGS is the logical disjunction of zero or more LOOKUP_
1589 flags. See cp-tree.h for more info.
1590
1591 If this is all OK, calls build_function_call with the resolved
1592 member function.
1593
1594 This function must also handle being called to perform
1595 initialization, promotion/coercion of arguments, and
1596 instantiation of default parameters.
1597
1598 Note that NAME may refer to an instance variable name. If
1599 `operator()()' is defined for the type of that field, then we return
1600 that result. */
1601 tree
1602 build_method_call (instance, name, parms, basetype_path, flags)
1603 tree instance, name, parms, basetype_path;
1604 int flags;
1605 {
1606 register tree function, fntype, value_type;
1607 register tree basetype, save_basetype;
1608 register tree baselink, result, method_name, parmtypes, parm;
1609 tree last;
1610 int pass;
1611 enum access_type access = access_public;
1612
1613 /* Range of cases for vtable optimization. */
1614 enum vtable_needs { not_needed, maybe_needed, unneeded, needed };
1615 enum vtable_needs need_vtbl = not_needed;
1616
1617 char *name_kind;
1618 int ever_seen = 0;
1619 tree instance_ptr = NULL_TREE;
1620 int all_virtual = flag_all_virtual;
1621 int static_call_context = 0;
1622 tree found_fns = NULL_TREE;
1623
1624 /* Keep track of `const' and `volatile' objects. */
1625 int constp, volatilep;
1626
1627 #ifdef GATHER_STATISTICS
1628 n_build_method_call++;
1629 #endif
1630
1631 if (instance == error_mark_node
1632 || name == error_mark_node
1633 || parms == error_mark_node
1634 || (instance != NULL_TREE && TREE_TYPE (instance) == error_mark_node))
1635 return error_mark_node;
1636
1637 /* This is the logic that magically deletes the second argument to
1638 operator delete, if it is not needed. */
1639 if (name == ansi_opname[(int) DELETE_EXPR] && list_length (parms)==2)
1640 {
1641 tree save_last = TREE_CHAIN (parms);
1642 tree result;
1643 /* get rid of unneeded argument */
1644 TREE_CHAIN (parms) = NULL_TREE;
1645 result = build_method_call (instance, name, parms, basetype_path,
1646 (LOOKUP_SPECULATIVELY|flags)
1647 &~LOOKUP_COMPLAIN);
1648 /* If it finds a match, return it. */
1649 if (result)
1650 return build_method_call (instance, name, parms, basetype_path, flags);
1651 /* If it doesn't work, two argument delete must work */
1652 TREE_CHAIN (parms) = save_last;
1653 }
1654 /* We already know whether it's needed or not for vec delete. */
1655 else if (name == ansi_opname[(int) VEC_DELETE_EXPR]
1656 && ! TYPE_VEC_DELETE_TAKES_SIZE (TREE_TYPE (instance)))
1657 TREE_CHAIN (parms) = NULL_TREE;
1658
1659 if (TREE_CODE (name) == BIT_NOT_EXPR)
1660 {
1661 flags |= LOOKUP_DESTRUCTOR;
1662 name = TREE_OPERAND (name, 0);
1663 if (parms)
1664 error ("destructors take no parameters");
1665 basetype = TREE_TYPE (instance);
1666 if (TREE_CODE (basetype) == REFERENCE_TYPE)
1667 basetype = TREE_TYPE (basetype);
1668 if (! ((IS_AGGR_TYPE (basetype)
1669 && name == constructor_name (basetype))
1670 || basetype == get_type_value (name)))
1671 {
1672 cp_error ("destructor name `~%D' does not match type `%T' of expression",
1673 name, basetype);
1674 return convert (void_type_node, instance);
1675 }
1676
1677 if (! TYPE_HAS_DESTRUCTOR (basetype))
1678 return convert (void_type_node, instance);
1679 instance = default_conversion (instance);
1680 instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
1681 return build_delete (build_pointer_type (basetype),
1682 instance_ptr, integer_two_node,
1683 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0);
1684 }
1685
1686 {
1687 char *xref_name;
1688
1689 /* Initialize name for error reporting. */
1690 if (IDENTIFIER_OPNAME_P (name) && ! IDENTIFIER_TYPENAME_P (name))
1691 {
1692 char *p = operator_name_string (name);
1693 xref_name = (char *)alloca (strlen (p) + 10);
1694 sprintf (xref_name, "operator %s", p);
1695 }
1696 else if (TREE_CODE (name) == SCOPE_REF)
1697 xref_name = IDENTIFIER_POINTER (TREE_OPERAND (name, 1));
1698 else
1699 xref_name = IDENTIFIER_POINTER (name);
1700
1701 GNU_xref_call (current_function_decl, xref_name);
1702 }
1703
1704 if (instance == NULL_TREE)
1705 {
1706 basetype = NULL_TREE;
1707 /* Check cases where this is really a call to raise
1708 an exception. */
1709 if (current_class_type && TREE_CODE (name) == IDENTIFIER_NODE)
1710 {
1711 basetype = purpose_member (name, CLASSTYPE_TAGS (current_class_type));
1712 if (basetype)
1713 basetype = TREE_VALUE (basetype);
1714 }
1715 else if (TREE_CODE (name) == SCOPE_REF
1716 && TREE_CODE (TREE_OPERAND (name, 0)) == IDENTIFIER_NODE)
1717 {
1718 if (! is_aggr_typedef (TREE_OPERAND (name, 0), 1))
1719 return error_mark_node;
1720 basetype = purpose_member (TREE_OPERAND (name, 1),
1721 CLASSTYPE_TAGS (IDENTIFIER_TYPE_VALUE (TREE_OPERAND (name, 0))));
1722 if (basetype)
1723 basetype = TREE_VALUE (basetype);
1724 }
1725
1726 if (basetype != NULL_TREE)
1727 ;
1728 /* call to a constructor... */
1729 else if (basetype_path)
1730 basetype = BINFO_TYPE (basetype_path);
1731 else if (IDENTIFIER_HAS_TYPE_VALUE (name))
1732 {
1733 basetype = IDENTIFIER_TYPE_VALUE (name);
1734 name = constructor_name_full (basetype);
1735 }
1736 else
1737 {
1738 tree typedef_name = lookup_name (name, 1);
1739 if (typedef_name && TREE_CODE (typedef_name) == TYPE_DECL)
1740 {
1741 /* Canonicalize the typedef name. */
1742 basetype = TREE_TYPE (typedef_name);
1743 name = TYPE_IDENTIFIER (basetype);
1744 }
1745 else
1746 {
1747 cp_error ("no constructor named `%T' in scope",
1748 name);
1749 return error_mark_node;
1750 }
1751 }
1752
1753 if (! IS_AGGR_TYPE (basetype))
1754 {
1755 non_aggr_error:
1756 if ((flags & LOOKUP_COMPLAIN) && TREE_CODE (basetype) != ERROR_MARK)
1757 cp_error ("request for member `%D' in `%E', which is of non-aggregate type `%T'",
1758 name, instance, basetype);
1759
1760 return error_mark_node;
1761 }
1762 }
1763 else if (instance == C_C_D || instance == current_class_decl)
1764 {
1765 /* When doing initialization, we side-effect the TREE_TYPE of
1766 C_C_D, hence we cannot set up BASETYPE from CURRENT_CLASS_TYPE. */
1767 basetype = TREE_TYPE (C_C_D);
1768
1769 /* Anything manifestly `this' in constructors and destructors
1770 has a known type, so virtual function tables are not needed. */
1771 if (TYPE_VIRTUAL_P (basetype)
1772 && !(flags & LOOKUP_NONVIRTUAL))
1773 need_vtbl = (dtor_label || ctor_label)
1774 ? unneeded : maybe_needed;
1775
1776 /* If `this' is a signature pointer and `name' is not a constructor,
1777 we are calling a signature member function. In that case, set the
1778 `basetype' to the signature type and dereference the `optr' field. */
1779 if (IS_SIGNATURE_POINTER (basetype)
1780 && TYPE_IDENTIFIER (basetype) != name)
1781 {
1782 basetype = SIGNATURE_TYPE (basetype);
1783 instance_ptr = build_optr_ref (instance);
1784 instance_ptr = convert (build_pointer_type (basetype), instance_ptr);
1785 basetype_path = TYPE_BINFO (basetype);
1786 }
1787 else
1788 {
1789 instance = C_C_D;
1790 instance_ptr = current_class_decl;
1791 basetype_path = TYPE_BINFO (current_class_type);
1792 }
1793 result = build_field_call (basetype_path, instance_ptr, name, parms);
1794
1795 if (result)
1796 return result;
1797 }
1798 else if (TREE_CODE (instance) == RESULT_DECL)
1799 {
1800 basetype = TREE_TYPE (instance);
1801 /* Should we ever have to make a virtual function reference
1802 from a RESULT_DECL, know that it must be of fixed type
1803 within the scope of this function. */
1804 if (!(flags & LOOKUP_NONVIRTUAL) && TYPE_VIRTUAL_P (basetype))
1805 need_vtbl = maybe_needed;
1806 instance_ptr = build1 (ADDR_EXPR, build_pointer_type (basetype), instance);
1807 }
1808 else
1809 {
1810 /* The MAIN_VARIANT of the type that `instance_ptr' winds up being. */
1811 tree inst_ptr_basetype;
1812
1813 static_call_context =
1814 (TREE_CODE (instance) == INDIRECT_REF
1815 && TREE_CODE (TREE_OPERAND (instance, 0)) == NOP_EXPR
1816 && TREE_OPERAND (TREE_OPERAND (instance, 0), 0) == error_mark_node);
1817
1818 if (TREE_CODE (instance) == OFFSET_REF)
1819 instance = resolve_offset_ref (instance);
1820
1821 /* the base type of an instance variable is pointer to class */
1822 basetype = TREE_TYPE (instance);
1823
1824 if (TREE_CODE (basetype) == REFERENCE_TYPE)
1825 {
1826 basetype = TREE_TYPE (basetype);
1827 if (! IS_AGGR_TYPE (basetype))
1828 goto non_aggr_error;
1829 /* Call to convert not needed because we are remaining
1830 within the same type. */
1831 instance_ptr = build1 (NOP_EXPR, build_pointer_type (basetype),
1832 instance);
1833 inst_ptr_basetype = TYPE_MAIN_VARIANT (basetype);
1834 }
1835 else
1836 {
1837 if (! IS_AGGR_TYPE (basetype)
1838 && ! (TYPE_LANG_SPECIFIC (basetype)
1839 && (IS_SIGNATURE_POINTER (basetype)
1840 || IS_SIGNATURE_REFERENCE (basetype))))
1841 goto non_aggr_error;
1842
1843 /* If `instance' is a signature pointer/reference and `name' is
1844 not a constructor, we are calling a signature member function.
1845 In that case set the `basetype' to the signature type. */
1846 if ((IS_SIGNATURE_POINTER (basetype)
1847 || IS_SIGNATURE_REFERENCE (basetype))
1848 && TYPE_IDENTIFIER (basetype) != name)
1849 basetype = SIGNATURE_TYPE (basetype);
1850
1851 if ((IS_SIGNATURE (basetype)
1852 && (instance_ptr = instance))
1853 || (lvalue_p (instance)
1854 && (instance_ptr = build_unary_op (ADDR_EXPR, instance, 0)))
1855 || (instance_ptr = unary_complex_lvalue (ADDR_EXPR, instance)))
1856 {
1857 if (instance_ptr == error_mark_node)
1858 return error_mark_node;
1859 }
1860 else if (TREE_CODE (instance) == NOP_EXPR
1861 || TREE_CODE (instance) == CONSTRUCTOR)
1862 {
1863 /* A cast is not an lvalue. Initialize a fresh temp
1864 with the value we are casting from, and proceed with
1865 that temporary. We can't cast to a reference type,
1866 so that simplifies the initialization to something
1867 we can manage. */
1868 tree temp = get_temp_name (TREE_TYPE (instance), 0);
1869 if (IS_AGGR_TYPE (TREE_TYPE (instance)))
1870 expand_aggr_init (temp, instance, 0, flags);
1871 else
1872 {
1873 store_init_value (temp, instance);
1874 expand_decl_init (temp);
1875 }
1876 instance = temp;
1877 instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
1878 }
1879 else
1880 {
1881 if (TREE_CODE (instance) != CALL_EXPR)
1882 my_friendly_abort (125);
1883 if (TYPE_NEEDS_CONSTRUCTING (basetype))
1884 instance = build_cplus_new (basetype, instance, 0);
1885 else
1886 {
1887 instance = get_temp_name (basetype, 0);
1888 TREE_ADDRESSABLE (instance) = 1;
1889 }
1890 instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
1891 }
1892 /* @@ Should we call comp_target_types here? */
1893 if (IS_SIGNATURE (basetype))
1894 inst_ptr_basetype = basetype;
1895 else
1896 inst_ptr_basetype = TREE_TYPE (TREE_TYPE (instance_ptr));
1897 if (TYPE_MAIN_VARIANT (basetype) == TYPE_MAIN_VARIANT (inst_ptr_basetype))
1898 basetype = inst_ptr_basetype;
1899 else
1900 {
1901 instance_ptr = convert (build_pointer_type (basetype), instance_ptr);
1902 if (instance_ptr == error_mark_node)
1903 return error_mark_node;
1904 }
1905 }
1906
1907 /* After converting `instance_ptr' above, `inst_ptr_basetype' was
1908 not updated, so we use `basetype' instead. */
1909 if (basetype_path == NULL_TREE
1910 && IS_SIGNATURE (basetype))
1911 basetype_path = TYPE_BINFO (basetype);
1912 else if (basetype_path == NULL_TREE ||
1913 BINFO_TYPE (basetype_path) != TYPE_MAIN_VARIANT (inst_ptr_basetype))
1914 basetype_path = TYPE_BINFO (inst_ptr_basetype);
1915
1916 result = build_field_call (basetype_path, instance_ptr, name, parms);
1917 if (result)
1918 return result;
1919
1920 if (!(flags & LOOKUP_NONVIRTUAL) && TYPE_VIRTUAL_P (basetype))
1921 {
1922 if (TREE_SIDE_EFFECTS (instance_ptr))
1923 {
1924 /* This is needed because the instance also provides the
1925 base of the virtual function table.  Without a SAVE_EXPR,
1926 the call we are building may be evaluated twice, or side
1927 effects on the instance variable (such as a post-increment)
1928 may happen twice. */
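/* Example (hypothetical C++ source): for a call like

       p++->f ();

   with f virtual, INSTANCE_PTR is the expression `p++'.  The
   SAVE_EXPR lets that value be used both as the object argument and
   as the base for the vtable fetch without evaluating `p++' a second
   time.  */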
1929 instance_ptr = save_expr (instance_ptr);
1930 instance = build_indirect_ref (instance_ptr, NULL_PTR);
1931 }
1932 else if (TREE_CODE (TREE_TYPE (instance)) == POINTER_TYPE)
1933 {
1934 /* This happens when called for operator new (). */
1935 instance = build_indirect_ref (instance, NULL_PTR);
1936 }
1937
1938 need_vtbl = maybe_needed;
1939 }
1940 }
1941
1942 if (TYPE_SIZE (basetype) == 0)
1943 {
1944 /* This is worth complaining about, I think. */
1945 cp_error ("cannot look up method in incomplete type `%T'", basetype);
1946 return error_mark_node;
1947 }
1948
1949 save_basetype = TYPE_MAIN_VARIANT (basetype);
1950
1951 #if 0
1952 if (all_virtual == 1
1953 && (! strncmp (IDENTIFIER_POINTER (name), OPERATOR_METHOD_FORMAT,
1954 OPERATOR_METHOD_LENGTH)
1955 || instance_ptr == NULL_TREE
1956 || (TYPE_OVERLOADS_METHOD_CALL_EXPR (basetype) == 0)))
1957 all_virtual = 0;
1958 #endif
1959
1960 last = NULL_TREE;
1961 for (parmtypes = NULL_TREE, parm = parms; parm; parm = TREE_CHAIN (parm))
1962 {
1963 tree t = TREE_TYPE (TREE_VALUE (parm));
1964 if (TREE_CODE (t) == OFFSET_TYPE)
1965 {
1966 /* Convert OFFSET_TYPE entities to their normal selves. */
1967 TREE_VALUE (parm) = resolve_offset_ref (TREE_VALUE (parm));
1968 t = TREE_TYPE (TREE_VALUE (parm));
1969 }
1970 if (TREE_CODE (TREE_VALUE (parm)) == OFFSET_REF
1971 && TREE_CODE (t) == METHOD_TYPE)
1972 {
1973 TREE_VALUE (parm) = build_unary_op (ADDR_EXPR, TREE_VALUE (parm), 0);
1974 }
1975 #if 0
1976 /* This breaks reference-to-array parameters. */
1977 if (TREE_CODE (t) == ARRAY_TYPE)
1978 {
1979 /* Perform the conversion from ARRAY_TYPE to POINTER_TYPE in place.
1980 This eliminates needless calls to `compute_conversion_costs'. */
1981 TREE_VALUE (parm) = default_conversion (TREE_VALUE (parm));
1982 t = TREE_TYPE (TREE_VALUE (parm));
1983 }
1984 #endif
1985 if (t == error_mark_node)
1986 return error_mark_node;
1987 last = build_tree_list (NULL_TREE, t);
1988 parmtypes = chainon (parmtypes, last);
1989 }
1990
1991 if (instance && IS_SIGNATURE (basetype))
1992 {
1993 /* @@ Should this be the constp/volatilep flags for the optr field
1994 of the signature pointer? */
1995 constp = TYPE_READONLY (basetype);
1996 volatilep = TYPE_VOLATILE (basetype);
1997 parms = tree_cons (NULL_TREE, instance_ptr, parms);
1998 }
1999 else if (instance)
2000 {
2001 /* TREE_READONLY (instance) fails for references. */
2002 constp = TYPE_READONLY (TREE_TYPE (TREE_TYPE (instance_ptr)));
2003 volatilep = TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (instance_ptr)));
2004 parms = tree_cons (NULL_TREE, instance_ptr, parms);
2005 }
2006 else
2007 {
2008 /* Raw constructors are always in charge. */
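/* Added note (an assumption about the old constructor calling
   convention, not taken from the surrounding comments): when
   BASETYPE has virtual bases its constructors receive a hidden
   "in charge" argument, and passing integer_one_node here asks for
   full construction, virtual bases included.  */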
2009 if (TYPE_USES_VIRTUAL_BASECLASSES (basetype)
2010 && ! (flags & LOOKUP_HAS_IN_CHARGE))
2011 {
2012 flags |= LOOKUP_HAS_IN_CHARGE;
2013 parms = tree_cons (NULL_TREE, integer_one_node, parms);
2014 parmtypes = tree_cons (NULL_TREE, integer_type_node, parmtypes);
2015 }
2016
2017 constp = 0;
2018 volatilep = 0;
2019 instance_ptr = build_int_2 (0, 0);
2020 TREE_TYPE (instance_ptr) = build_pointer_type (basetype);
2021 parms = tree_cons (NULL_TREE, instance_ptr, parms);
2022 }
2023
2024 parmtypes = tree_cons (NULL_TREE, TREE_TYPE (instance_ptr), parmtypes);
2025
2026 if (last == NULL_TREE)
2027 last = parmtypes;
2028
2029 /* Look up function name in the structure type definition. */
2030
2031 if ((IDENTIFIER_HAS_TYPE_VALUE (name)
2032 && ! IDENTIFIER_OPNAME_P (name)
2033 && IS_AGGR_TYPE (IDENTIFIER_TYPE_VALUE (name))
2034 && TREE_CODE (IDENTIFIER_TYPE_VALUE (name)) != UNINSTANTIATED_P_TYPE)
2035 || name == constructor_name (basetype))
2036 {
2037 tree tmp = NULL_TREE;
2038 if (IDENTIFIER_TYPE_VALUE (name) == basetype
2039 || name == constructor_name (basetype))
2040 tmp = TYPE_BINFO (basetype);
2041 else
2042 tmp = get_binfo (IDENTIFIER_TYPE_VALUE (name), basetype, 0);
2043
2044 if (tmp != NULL_TREE)
2045 {
2046 name_kind = "constructor";
2047
2048 if (TYPE_USES_VIRTUAL_BASECLASSES (basetype)
2049 && ! (flags & LOOKUP_HAS_IN_CHARGE))
2050 {
2051 /* Constructors called for initialization
2052 only are never in charge. */
2053 tree tmplist;
2054
2055 flags |= LOOKUP_HAS_IN_CHARGE;
2056 tmplist = tree_cons (NULL_TREE, integer_zero_node,
2057 TREE_CHAIN (parms));
2058 TREE_CHAIN (parms) = tmplist;
2059 tmplist = tree_cons (NULL_TREE, integer_type_node, TREE_CHAIN (parmtypes));
2060 TREE_CHAIN (parmtypes) = tmplist;
2061 }
2062 basetype = BINFO_TYPE (tmp);
2063 }
2064 else
2065 name_kind = "method";
2066 }
2067 else
2068 name_kind = "method";
2069
2070 if (basetype_path == NULL_TREE
2071 || BINFO_TYPE (basetype_path) != TYPE_MAIN_VARIANT (basetype))
2072 basetype_path = TYPE_BINFO (basetype);
2073 result = lookup_fnfields (basetype_path, name,
2074 (flags & LOOKUP_COMPLAIN));
2075 if (result == error_mark_node)
2076 return error_mark_node;
2077
2078
2079 #if 0
2080 /* Now, go look for this method name. We do not find destructors here.
2081
2082 Putting `void_list_node' on the end of the parmtypes
2083 fakes out `build_decl_overload' into doing the right thing. */
2084 TREE_CHAIN (last) = void_list_node;
2085 method_name = build_decl_overload (name, parmtypes,
2086 1 + (name == constructor_name (save_basetype)
2087 || name == constructor_name_full (save_basetype)));
2088 TREE_CHAIN (last) = NULL_TREE;
2089 #endif
2090
2091 for (pass = 0; pass < 2; pass++)
2092 {
2093 struct candidate *candidates;
2094 struct candidate *cp;
2095 int len;
2096 unsigned best = 1;
2097
2098 /* This increments every time we go up the type hierarchy.
2099 The idea is to prefer a function of the derived class if possible. */
2100 int b_or_d = 0;
2101
2102 baselink = result;
2103
2104 if (pass > 0)
2105 {
2106 candidates
2107 = (struct candidate *) alloca ((ever_seen+1)
2108 * sizeof (struct candidate));
2109 bzero ((char *) candidates, (ever_seen + 1) * sizeof (struct candidate));
2110 cp = candidates;
2111 len = list_length (parms);
2112 ever_seen = 0;
2113
2114 /* First see if a global function has a shot at it. */
2115 if (flags & LOOKUP_GLOBAL)
2116 {
2117 tree friend_parms;
2118 tree parm = instance_ptr;
2119
2120 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE)
2121 parm = convert_from_reference (parm);
2122 else if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2123 parm = build_indirect_ref (parm, "friendifying parms (compiler error)");
2124 else
2125 my_friendly_abort (167);
2126
2127 friend_parms = tree_cons (NULL_TREE, parm, TREE_CHAIN (parms));
2128
2129 cp->h_len = len;
2130 cp->harshness = (struct harshness_code *)
2131 alloca ((len + 1) * sizeof (struct harshness_code));
2132
2133 result = build_overload_call (name, friend_parms, 0, cp);
2134 /* If it turns out to be the one we were actually looking for
2135 (it was probably a friend function), then return the
2136 good result. */
2137 if (TREE_CODE (result) == CALL_EXPR)
2138 return result;
2139
2140 while ((cp->h.code & EVIL_CODE) == 0)
2141 {
2142 /* non-standard uses: set the field to 0 to indicate
2143 we are using a non-member function. */
2144 cp->u.field = 0;
2145 if (cp->harshness[len].distance == 0
2146 && cp->h.code < best)
2147 best = cp->h.code;
2148 cp += 1;
2149 }
2150 }
2151 }
2152
2153 while (baselink)
2154 {
2155 /* We have a hit (of sorts). If the parameter list is
2156 "error_mark_node", or some variant thereof, it won't
2157 match any methods.  Since we have verified that there is
2158 some method vaguely matching this one (in name at least),
2159 silently return.
2160
2161 Don't stop for friends, however. */
2162 basetype_path = TREE_PURPOSE (baselink);
2163
2164 function = TREE_VALUE (baselink);
2165 if (TREE_CODE (basetype_path) == TREE_LIST)
2166 basetype_path = TREE_VALUE (basetype_path);
2167 basetype = BINFO_TYPE (basetype_path);
2168
2169 #if 0
2170 /* Cast the instance variable if necessary. */
2171 if (basetype != TYPE_MAIN_VARIANT
2172 (TREE_TYPE (TREE_TYPE (TREE_VALUE (parms)))))
2173 {
2174 if (basetype == save_basetype)
2175 TREE_VALUE (parms) = instance_ptr;
2176 else
2177 {
2178 tree type = build_pointer_type
2179 (build_type_variant (basetype, constp, volatilep));
2180 TREE_VALUE (parms) = convert_force (type, instance_ptr, 0);
2181 }
2182 }
2183
2184 /* FIXME: this is the wrong place to get an error. Hopefully
2185 the access-control rewrite will make this change more cleanly. */
2186 if (TREE_VALUE (parms) == error_mark_node)
2187 return error_mark_node;
2188 #endif
2189
2190 if (DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (function)))
2191 function = DECL_CHAIN (function);
2192
2193 for (; function; function = DECL_CHAIN (function))
2194 {
2195 #ifdef GATHER_STATISTICS
2196 n_inner_fields_searched++;
2197 #endif
2198 ever_seen++;
2199 if (pass > 0)
2200 found_fns = tree_cons (NULL_TREE, function, found_fns);
2201
2202 /* Not looking for friends here. */
2203 if (TREE_CODE (TREE_TYPE (function)) == FUNCTION_TYPE
2204 && ! DECL_STATIC_FUNCTION_P (function))
2205 continue;
2206
2207 #if 0
2208 if (pass == 0
2209 && DECL_ASSEMBLER_NAME (function) == method_name)
2210 goto found;
2211 #endif
2212
2213 if (pass > 0)
2214 {
2215 tree these_parms = parms;
2216
2217 #ifdef GATHER_STATISTICS
2218 n_inner_fields_searched++;
2219 #endif
2220 cp->h_len = len;
2221 cp->harshness = (struct harshness_code *)
2222 alloca ((len + 1) * sizeof (struct harshness_code));
2223
2224 if (DECL_STATIC_FUNCTION_P (function))
2225 these_parms = TREE_CHAIN (these_parms);
2226 compute_conversion_costs (function, these_parms, cp, len);
2227
2228 if ((cp->h.code & EVIL_CODE) == 0)
2229 {
2230 cp->u.field = function;
2231 cp->function = function;
2232 cp->basetypes = basetype_path;
2233
2234 /* Don't allow non-converting constructors to convert. */
2235 if (flags & LOOKUP_ONLYCONVERTING
2236 && DECL_LANG_SPECIFIC (function)
2237 && DECL_NONCONVERTING_P (function))
2238 continue;
2239
2240 /* No "two-level" conversions. */
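/* Example (hypothetical C++ source): with LOOKUP_NO_CONVERSION set,
   a candidate whose match itself needs a user-defined conversion
   (USER_CODE) is skipped, so something like

       struct A { A (int); };
       struct B { B (A); };
       B b = 1;               // would need int -> A -> B

   is not accepted through this path.  */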
2241 if (flags & LOOKUP_NO_CONVERSION
2242 && (cp->h.code & USER_CODE))
2243 continue;
2244
2245 cp++;
2246 }
2247 }
2248 }
2249 /* Now we have run through one link's member functions.
2250 Arrange to head-insert this link's links. */
2251 baselink = next_baselink (baselink);
2252 b_or_d += 1;
2253 /* Don't grab functions from base classes. lookup_fnfield will
2254 do the work to get us down into the right place. */
2255 baselink = NULL_TREE;
2256 }
2257 if (pass == 0)
2258 {
2259 tree igv = lookup_name_nonclass (name);
2260
2261 /* No exact match could be found. Now try to find match
2262 using default conversions. */
2263 if ((flags & LOOKUP_GLOBAL) && igv)
2264 {
2265 if (TREE_CODE (igv) == FUNCTION_DECL)
2266 ever_seen += 1;
2267 else if (TREE_CODE (igv) == TREE_LIST)
2268 ever_seen += count_functions (igv);
2269 }
2270
2271 if (ever_seen == 0)
2272 {
2273 if ((flags & (LOOKUP_SPECULATIVELY|LOOKUP_COMPLAIN))
2274 == LOOKUP_SPECULATIVELY)
2275 return NULL_TREE;
2276
2277 TREE_CHAIN (last) = void_list_node;
2278 if (flags & LOOKUP_GLOBAL)
2279 cp_error ("no global or member function `%D(%A)' defined",
2280 name, parmtypes);
2281 else
2282 cp_error ("no member function `%T::%D(%A)' defined",
2283 save_basetype, name, TREE_CHAIN (parmtypes));
2284 return error_mark_node;
2285 }
2286 continue;
2287 }
2288
2289 if (cp - candidates != 0)
2290 {
2291 /* Rank from worst to best. Then cp will point to best one.
2292 Private fields have their bits flipped. For unsigned
2293 numbers, this should make them look very large.
2294 If the best alternate has a (signed) negative value,
2295 then all we ever saw were private members. */
2296 if (cp - candidates > 1)
2297 {
2298 int n_candidates = cp - candidates;
2299 extern int warn_synth;
2300 TREE_VALUE (parms) = instance_ptr;
2301 cp = ideal_candidate (save_basetype, candidates,
2302 n_candidates, parms, len);
2303 if (cp == (struct candidate *)0)
2304 {
2305 if (flags & LOOKUP_COMPLAIN)
2306 {
2307 TREE_CHAIN (last) = void_list_node;
2308 cp_error ("call of overloaded %s `%D(%A)' is ambiguous",
2309 name_kind, name, TREE_CHAIN (parmtypes));
2310 print_n_candidates (candidates, n_candidates);
2311 }
2312 return error_mark_node;
2313 }
2314 if (cp->h.code & EVIL_CODE)
2315 return error_mark_node;
2316 if (warn_synth
2317 && DECL_NAME (cp->function) == ansi_opname[MODIFY_EXPR]
2318 && DECL_ARTIFICIAL (cp->function)
2319 && n_candidates == 2)
2320 {
2321 cp_warning ("using synthesized `%#D' for copy assignment",
2322 cp->function);
2323 cp_warning_at (" where cfront would use `%#D'",
2324 candidates->function);
2325 }
2326 }
2327 else if (cp[-1].h.code & EVIL_CODE)
2328 {
2329 if (flags & LOOKUP_COMPLAIN)
2330 cp_error ("ambiguous type conversion requested for %s `%D'",
2331 name_kind, name);
2332 return error_mark_node;
2333 }
2334 else
2335 cp--;
2336
2337 /* The global function was the best, so use it. */
2338 if (cp->u.field == 0)
2339 {
2340 /* We must convert the instance pointer into a reference type.
2341 Global overloaded functions can only either take
2342 aggregate objects (which come for free from references)
2343 or reference data types anyway. */
2344 TREE_VALUE (parms) = copy_node (instance_ptr);
2345 TREE_TYPE (TREE_VALUE (parms)) = build_reference_type (TREE_TYPE (TREE_TYPE (instance_ptr)));
2346 return build_function_call (cp->function, parms);
2347 }
2348
2349 function = cp->function;
2350 basetype_path = cp->basetypes;
2351 if (! DECL_STATIC_FUNCTION_P (function))
2352 TREE_VALUE (parms) = cp->arg;
2353 goto found_and_maybe_warn;
2354 }
2355
2356 if (flags & (LOOKUP_COMPLAIN|LOOKUP_SPECULATIVELY))
2357 {
2358 if ((flags & (LOOKUP_SPECULATIVELY|LOOKUP_COMPLAIN))
2359 == LOOKUP_SPECULATIVELY)
2360 return NULL_TREE;
2361
2362 if (DECL_STATIC_FUNCTION_P (cp->function))
2363 parms = TREE_CHAIN (parms);
2364 if (ever_seen)
2365 {
2366 if (flags & LOOKUP_SPECULATIVELY)
2367 return NULL_TREE;
2368 if (static_call_context
2369 && TREE_CODE (TREE_TYPE (cp->function)) == METHOD_TYPE)
2370 cp_error ("object missing in call to `%D'", cp->function);
2371 else if (ever_seen > 1)
2372 {
2373 TREE_CHAIN (last) = void_list_node;
2374 cp_error ("no matching function for call to `%T::%D (%A)%V'",
2375 TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (instance_ptr))),
2376 name, TREE_CHAIN (parmtypes),
2377 TREE_TYPE (TREE_TYPE (instance_ptr)));
2378 TREE_CHAIN (last) = NULL_TREE;
2379 print_candidates (found_fns);
2380 }
2381 else
2382 report_type_mismatch (cp, parms, name_kind);
2383 return error_mark_node;
2384 }
2385
2386 if ((flags & (LOOKUP_SPECULATIVELY|LOOKUP_COMPLAIN))
2387 == LOOKUP_COMPLAIN)
2388 {
2389 cp_error ("%T has no method named %D", save_basetype, name);
2390 return error_mark_node;
2391 }
2392 return NULL_TREE;
2393 }
2394 continue;
2395
2396 found_and_maybe_warn:
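/* Example (hypothetical C++ source): the CONST_CODE check below
   catches calls such as

       const X x;
       x.f ();                // f is a non-const member function

   where the chosen function would discard the object's const
   qualifier.  */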
2397 if ((cp->harshness[0].code & CONST_CODE)
2398 /* 12.1p2: Constructors can be called for const objects. */
2399 && ! DECL_CONSTRUCTOR_P (cp->function))
2400 {
2401 if (flags & LOOKUP_COMPLAIN)
2402 {
2403 cp_error_at ("non-const member function `%D'", cp->function);
2404 error ("called for const object at this point in file");
2405 }
2406 /* Not good enough for a match. */
2407 else
2408 return error_mark_node;
2409 }
2410 goto found;
2411 }
2412 /* Silently return error_mark_node. */
2413 return error_mark_node;
2414
2415 found:
2416 if (flags & LOOKUP_PROTECT)
2417 access = compute_access (basetype_path, function);
2418
2419 if (access == access_private)
2420 {
2421 if (flags & LOOKUP_COMPLAIN)
2422 {
2423 cp_error_at ("%s `%+#D' is %s", name_kind, function,
2424 TREE_PRIVATE (function) ? "private"
2425 : "from private base class");
2426 error ("within this context");
2427 }
2428 return error_mark_node;
2429 }
2430 else if (access == access_protected)
2431 {
2432 if (flags & LOOKUP_COMPLAIN)
2433 {
2434 cp_error_at ("%s `%+#D' %s", name_kind, function,
2435 TREE_PROTECTED (function) ? "is protected"
2436 : "has protected accessibility");
2437 error ("within this context");
2438 }
2439 return error_mark_node;
2440 }
2441
2442 /* From here on down, BASETYPE is the type that INSTANCE_PTR's
2443 type (if it exists) is a pointer to. */
2444
2445 if (DECL_ABSTRACT_VIRTUAL_P (function)
2446 && instance == C_C_D
2447 && DECL_CONSTRUCTOR_P (current_function_decl)
2448 && ! (flags & LOOKUP_NONVIRTUAL)
2449 && value_member (function, get_abstract_virtuals (basetype)))
2450 cp_error ("abstract virtual `%#D' called from constructor", function);
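/* Example (hypothetical C++ source): the diagnostic above fires for
   code such as

       struct B
       {
         virtual void f () = 0;
         B () { f (); }       // abstract virtual called from ctor
       };
   */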
2451
2452 if (IS_SIGNATURE (basetype) && static_call_context)
2453 {
2454 cp_error ("cannot call signature member function `%T::%D' without signature pointer/reference",
2455 basetype, name);
2456 return error_mark_node;
2457 }
2458 else if (IS_SIGNATURE (basetype))
2459 return build_signature_method_call (basetype, instance, function, parms);
2460
2461 function = DECL_MAIN_VARIANT (function);
2462 /* Declare external function if necessary. */
2463 assemble_external (function);
2464
2465 #if 1
2466 /* Is it a synthesized method that needs to be synthesized? */
2467 if (DECL_ARTIFICIAL (function) && ! flag_no_inline
2468 && ! DECL_INITIAL (function)
2469 /* Kludge: don't synthesize for default args. */
2470 && current_function_decl)
2471 synthesize_method (function);
2472 #endif
2473
2474 if (pedantic && DECL_THIS_INLINE (function) && ! DECL_ARTIFICIAL (function)
2475 && ! DECL_INITIAL (function) && ! DECL_PENDING_INLINE_INFO (function))
2476 cp_warning ("inline function `%#D' called before definition", function);
2477
2478 fntype = TREE_TYPE (function);
2479 if (TREE_CODE (fntype) == POINTER_TYPE)
2480 fntype = TREE_TYPE (fntype);
2481 basetype = DECL_CLASS_CONTEXT (function);
2482
2483 /* If we are referencing a virtual function from an object
2484 of effectively static type, then there is no need
2485 to go through the virtual function table. */
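/* Example (hypothetical C++ source): for

       X x;
       x.vf ();

   the dynamic type of `x' is known, so resolves_to_fixed_type_p
   should succeed and the call can be emitted directly to X::vf
   instead of going through the vtable.  */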
2486 if (need_vtbl == maybe_needed)
2487 {
2488 int fixed_type = resolves_to_fixed_type_p (instance, 0);
2489
2490 if (all_virtual == 1
2491 && DECL_VINDEX (function)
2492 && may_be_remote (basetype))
2493 need_vtbl = needed;
2494 else if (DECL_VINDEX (function))
2495 need_vtbl = fixed_type ? unneeded : needed;
2496 else
2497 need_vtbl = not_needed;
2498 }
2499
2500 if (TREE_CODE (fntype) == METHOD_TYPE && static_call_context
2501 && !DECL_CONSTRUCTOR_P (function))
2502 {
2503 /* Let's be nice to the user for now, and give reasonable
2504 default behavior. */
2505 instance_ptr = current_class_decl;
2506 if (instance_ptr)
2507 {
2508 if (basetype != current_class_type)
2509 {
2510 tree binfo = get_binfo (basetype, current_class_type, 1);
2511 if (binfo == NULL_TREE)
2512 {
2513 error_not_base_type (function, current_class_type);
2514 return error_mark_node;
2515 }
2516 else if (basetype == error_mark_node)
2517 return error_mark_node;
2518 }
2519 }
2520 /* Only allow a static member function to call another static member
2521 function. */
2522 else if (DECL_LANG_SPECIFIC (function)
2523 && !DECL_STATIC_FUNCTION_P (function))
2524 {
2525 cp_error ("cannot call member function `%D' without object",
2526 function);
2527 return error_mark_node;
2528 }
2529 }
2530
2531 value_type = TREE_TYPE (fntype) ? TREE_TYPE (fntype) : void_type_node;
2532
2533 if (TYPE_SIZE (value_type) == 0)
2534 {
2535 if (flags & LOOKUP_COMPLAIN)
2536 incomplete_type_error (0, value_type);
2537 return error_mark_node;
2538 }
2539
2540 if (DECL_STATIC_FUNCTION_P (function))
2541 parms = convert_arguments (NULL_TREE, TYPE_ARG_TYPES (fntype),
2542 TREE_CHAIN (parms), function, LOOKUP_NORMAL);
2543 else if (need_vtbl == unneeded)
2544 {
2545 int sub_flags = DECL_CONSTRUCTOR_P (function) ? flags : LOOKUP_NORMAL;
2546 basetype = TREE_TYPE (instance);
2547 if (TYPE_METHOD_BASETYPE (TREE_TYPE (function)) != TYPE_MAIN_VARIANT (basetype)
2548 && TYPE_USES_COMPLEX_INHERITANCE (basetype))
2549 {
2550 basetype = DECL_CLASS_CONTEXT (function);
2551 instance_ptr = convert_pointer_to (basetype, instance_ptr);
2552 instance = build_indirect_ref (instance_ptr, NULL_PTR);
2553 }
2554 parms = tree_cons (NULL_TREE, instance_ptr,
2555 convert_arguments (NULL_TREE, TREE_CHAIN (TYPE_ARG_TYPES (fntype)), TREE_CHAIN (parms), function, sub_flags));
2556 }
2557 else
2558 {
2559 if ((flags & LOOKUP_NONVIRTUAL) == 0)
2560 basetype = DECL_CONTEXT (function);
2561
2562 /* First parm could be integer_zerop with casts like
2563 ((Object*)0)->Object::IsA() */
2564 if (!integer_zerop (TREE_VALUE (parms)))
2565 {
2566 /* Since we can't have inheritance with a union, doing get_binfo
2567 on it won't work. We do all the convert_pointer_to_real
2568 stuff to handle MI correctly...for unions, that's not
2569 an issue, so we must short-circuit that extra work here. */
2570 tree tmp = TREE_TYPE (TREE_TYPE (TREE_VALUE (parms)));
2571 if (tmp != NULL_TREE && TREE_CODE (tmp) == UNION_TYPE)
2572 instance_ptr = TREE_VALUE (parms);
2573 else
2574 {
2575 tree binfo = get_binfo (basetype,
2576 TREE_TYPE (TREE_TYPE (TREE_VALUE (parms))),
2577 0);
2578 instance_ptr = convert_pointer_to_real (binfo, TREE_VALUE (parms));
2579 }
2580 instance_ptr
2581 = convert_pointer_to (build_type_variant (basetype,
2582 constp, volatilep),
2583 instance_ptr);
2584
2585 if (TREE_CODE (instance_ptr) == COND_EXPR)
2586 {
2587 instance_ptr = save_expr (instance_ptr);
2588 instance = build_indirect_ref (instance_ptr, NULL_PTR);
2589 }
2590 else if (TREE_CODE (instance_ptr) == NOP_EXPR
2591 && TREE_CODE (TREE_OPERAND (instance_ptr, 0)) == ADDR_EXPR
2592 && TREE_OPERAND (TREE_OPERAND (instance_ptr, 0), 0) == instance)
2593 ;
2594 /* The call to `convert_pointer_to' may return error_mark_node. */
2595 else if (TREE_CODE (instance_ptr) == ERROR_MARK)
2596 return instance_ptr;
2597 else if (instance == NULL_TREE
2598 || TREE_CODE (instance) != INDIRECT_REF
2599 || TREE_OPERAND (instance, 0) != instance_ptr)
2600 instance = build_indirect_ref (instance_ptr, NULL_PTR);
2601 }
2602 parms = tree_cons (NULL_TREE, instance_ptr,
2603 convert_arguments (NULL_TREE, TREE_CHAIN (TYPE_ARG_TYPES (fntype)), TREE_CHAIN (parms), function, LOOKUP_NORMAL));
2604 }
2605
2606 #if 0
2607 /* Constructors do not overload method calls. */
2608 else if (TYPE_OVERLOADS_METHOD_CALL_EXPR (basetype)
2609 && name != TYPE_IDENTIFIER (basetype)
2610 && (TREE_CODE (function) != FUNCTION_DECL
2611 || strncmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function)),
2612 OPERATOR_METHOD_FORMAT,
2613 OPERATOR_METHOD_LENGTH))
2614 && (may_be_remote (basetype) || instance != C_C_D))
2615 {
2616 tree fn_as_int;
2617
2618 parms = TREE_CHAIN (parms);
2619
2620 if (!all_virtual && TREE_CODE (function) == FUNCTION_DECL)
2621 fn_as_int = build_unary_op (ADDR_EXPR, function, 0);
2622 else
2623 fn_as_int = convert (TREE_TYPE (default_conversion (function)), DECL_VINDEX (function));
2624 if (all_virtual == 1)
2625 fn_as_int = convert (integer_type_node, fn_as_int);
2626
2627 result = build_opfncall (METHOD_CALL_EXPR, LOOKUP_NORMAL, instance, fn_as_int, parms);
2628
2629 if (result == NULL_TREE)
2630 {
2631 compiler_error ("could not overload `operator->()(...)'");
2632 return error_mark_node;
2633 }
2634 else if (result == error_mark_node)
2635 return error_mark_node;
2636
2637 #if 0
2638 /* Do this if we want the result of operator->() to inherit
2639 the type of the function it is subbing for. */
2640 TREE_TYPE (result) = value_type;
2641 #endif
2642
2643 return result;
2644 }
2645 #endif
2646
2647 if (parms == error_mark_node
2648 || (parms && TREE_CHAIN (parms) == error_mark_node))
2649 return error_mark_node;
2650
2651 if (need_vtbl == needed)
2652 {
2653 function = build_vfn_ref (&TREE_VALUE (parms), instance,
2654 DECL_VINDEX (function));
2655 TREE_TYPE (function) = build_pointer_type (fntype);
2656 }
2657
2658 if (TREE_CODE (function) == FUNCTION_DECL)
2659 GNU_xref_call (current_function_decl,
2660 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function)));
2661
2662 {
2663 int is_constructor;
2664
2665 if (TREE_CODE (function) == FUNCTION_DECL)
2666 {
2667 is_constructor = DECL_CONSTRUCTOR_P (function);
2668 TREE_USED (function) = 1;
2669 function = default_conversion (function);
2670 }
2671 else
2672 {
2673 is_constructor = 0;
2674 function = default_conversion (function);
2675 }
2676
2677 result = build_nt (CALL_EXPR, function, parms, NULL_TREE);
2678
2679 TREE_TYPE (result) = value_type;
2680 TREE_SIDE_EFFECTS (result) = 1;
2681 TREE_HAS_CONSTRUCTOR (result) = is_constructor;
2682 result = convert_from_reference (result);
2683 return result;
2684 }
2685 }
2686
2687 /* Similar to `build_method_call', but for overloaded non-member functions.
2688 The name of this function comes through NAME. The name depends
2689 on PARMS.
2690
2691 Note that this function must handle simple `C' promotions,
2692 as well as variable numbers of arguments (...), and
2693 default arguments to boot.
2694
2695 If the overloading is successful, we return a tree node which
2696 contains the call to the function.
2697
2698 If overloading produces candidates which are probable, but not definite,
2699 we hold these candidates. If FINAL_CP is non-zero, then we are free
2700 to assume that final_cp points to enough storage for all candidates that
2701 this function might generate. The `harshness' array is preallocated for
2702 the first candidate, but not for subsequent ones.
2703
2704 Note that the DECL_RTL of FUNCTION must be made to agree with this
2705 function's new name. */
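/* Illustrative usage (assumed, not from the original sources): a
   caller resolving an ordinary call `f (args)' might invoke

       build_overload_call (get_identifier ("f"), parms,
                            LOOKUP_COMPLAIN, (struct candidate *) 0);

   and get back either the finished CALL_EXPR or error_mark_node.  */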
2706
2707 tree
2708 build_overload_call_real (fnname, parms, flags, final_cp, buildxxx)
2709 tree fnname, parms;
2710 int flags;
2711 struct candidate *final_cp;
2712 int buildxxx;
2713 {
2714 /* must check for overloading here */
2715 tree overload_name, functions, function, parm;
2716 tree parmtypes = NULL_TREE, last = NULL_TREE;
2717 register tree outer;
2718 int length;
2719 int parmlength = list_length (parms);
2720
2721 struct candidate *candidates, *cp;
2722
2723 if (final_cp)
2724 {
2725 final_cp[0].h.code = 0;
2726 final_cp[0].h.distance = 0;
2727 final_cp[0].function = 0;
2728 /* end marker. */
2729 final_cp[1].h.code = EVIL_CODE;
2730 }
2731
2732 for (parm = parms; parm; parm = TREE_CHAIN (parm))
2733 {
2734 register tree t = TREE_TYPE (TREE_VALUE (parm));
2735
2736 if (t == error_mark_node)
2737 {
2738 if (final_cp)
2739 final_cp->h.code = EVIL_CODE;
2740 return error_mark_node;
2741 }
2742 if (TREE_CODE (t) == OFFSET_TYPE)
2743 #if 0
2744 /* This breaks reference-to-array parameters. */
2745 || TREE_CODE (t) == ARRAY_TYPE
2746 #endif
2747 {
2748 /* Perform the conversion from ARRAY_TYPE to POINTER_TYPE in place.
2749 Also convert OFFSET_TYPE entities to their normal selves.
2750 This eliminates needless calls to `compute_conversion_costs'. */
2751 TREE_VALUE (parm) = default_conversion (TREE_VALUE (parm));
2752 t = TREE_TYPE (TREE_VALUE (parm));
2753 }
2754 last = build_tree_list (NULL_TREE, t);
2755 parmtypes = chainon (parmtypes, last);
2756 }
2757 if (last)
2758 TREE_CHAIN (last) = void_list_node;
2759 else
2760 parmtypes = void_list_node;
2761
2762 if (is_overloaded_fn (fnname))
2763 {
2764 functions = fnname;
2765 if (TREE_CODE (fnname) == TREE_LIST)
2766 fnname = TREE_PURPOSE (functions);
2767 else if (TREE_CODE (fnname) == FUNCTION_DECL)
2768 fnname = DECL_NAME (functions);
2769 }
2770 else
2771 functions = lookup_name_nonclass (fnname);
2772
2773 if (functions == NULL_TREE)
2774 {
2775 if (flags & LOOKUP_SPECULATIVELY)
2776 return NULL_TREE;
2777 if (flags & LOOKUP_COMPLAIN)
2778 error ("only member functions apply");
2779 if (final_cp)
2780 final_cp->h.code = EVIL_CODE;
2781 return error_mark_node;
2782 }
2783
2784 if (TREE_CODE (functions) == FUNCTION_DECL && ! IDENTIFIER_OPNAME_P (fnname))
2785 {
2786 functions = DECL_MAIN_VARIANT (functions);
2787 if (final_cp)
2788 {
2789 /* We are just curious whether this is a viable alternative or
2790 not. */
2791 compute_conversion_costs (functions, parms, final_cp, parmlength);
2792 return functions;
2793 }
2794 else
2795 return build_function_call_real (functions, parms, 1, flags);
2796 }
2797
2798 if (TREE_CODE (functions) == TREE_LIST
2799 && TREE_VALUE (functions) == NULL_TREE)
2800 {
2801 if (flags & LOOKUP_SPECULATIVELY)
2802 return NULL_TREE;
2803
2804 if (flags & LOOKUP_COMPLAIN)
2805 cp_error ("function `%D' declared overloaded, but no instances of that function declared",
2806 TREE_PURPOSE (functions));
2807 if (final_cp)
2808 final_cp->h.code = EVIL_CODE;
2809 return error_mark_node;
2810 }
2811
2812 length = count_functions (functions);
2813
2814 if (final_cp)
2815 candidates = final_cp;
2816 else
2817 {
2818 candidates
2819 = (struct candidate *)alloca ((length+1) * sizeof (struct candidate));
2820 bzero ((char *) candidates, (length + 1) * sizeof (struct candidate));
2821 }
2822
2823 cp = candidates;
2824
2825 my_friendly_assert (is_overloaded_fn (functions), 169);
2826
2827 functions = get_first_fn (functions);
2828
2829 /* OUTER is the list of FUNCTION_DECLS, in a TREE_LIST. */
2830 for (outer = functions; outer; outer = DECL_CHAIN (outer))
2831 {
2832 int template_cost = 0;
2833 function = outer;
2834 if (TREE_CODE (function) != FUNCTION_DECL
2835 && ! (TREE_CODE (function) == TEMPLATE_DECL
2836 && ! DECL_TEMPLATE_IS_CLASS (function)
2837 && TREE_CODE (DECL_TEMPLATE_RESULT (function)) == FUNCTION_DECL))
2838 {
2839 enum tree_code code = TREE_CODE (function);
2840 if (code == TEMPLATE_DECL)
2841 code = TREE_CODE (DECL_TEMPLATE_RESULT (function));
2842 if (code == CONST_DECL)
2843 cp_error_at
2844 ("enumeral value `%D' conflicts with function of same name",
2845 function);
2846 else if (code == VAR_DECL)
2847 {
2848 if (TREE_STATIC (function))
2849 cp_error_at
2850 ("variable `%D' conflicts with function of same name",
2851 function);
2852 else
2853 cp_error_at
2854 ("constant field `%D' conflicts with function of same name",
2855 function);
2856 }
2857 else if (code == TYPE_DECL)
2858 continue;
2859 else
2860 my_friendly_abort (2);
2861 error ("at this point in file");
2862 continue;
2863 }
2864 if (TREE_CODE (function) == TEMPLATE_DECL)
2865 {
2866 int ntparms = TREE_VEC_LENGTH (DECL_TEMPLATE_PARMS (function));
2867 tree *targs = (tree *) alloca (sizeof (tree) * ntparms);
2868 int i;
2869
2870 i = type_unification (DECL_TEMPLATE_PARMS (function), targs,
2871 TYPE_ARG_TYPES (TREE_TYPE (function)),
2872 parms, &template_cost, 0);
2873 if (i == 0)
2874 function = instantiate_template (function, targs);
2875 }
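/* Example (hypothetical C++ source): given

       template <class T> void f (T);
       f (1);

   type_unification should deduce T = int, and instantiate_template
   then yields the FUNCTION_DECL for f<int>, which competes with the
   other candidates; the deduction cost is folded into the
   candidate's int_penalty below.  */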
2876
2877 if (TREE_CODE (function) == TEMPLATE_DECL)
2878 {
2879 /* Unconverted template -- failed match. */
2880 cp->function = function;
2881 cp->u.bad_arg = -4;
2882 cp->h.code = EVIL_CODE;
2883 }
2884 else
2885 {
2886 struct candidate *cp2;
2887
2888 /* Check that this decl is not the same as a function that's in
2889 the list due to some template instantiation. */
2890 cp2 = candidates;
2891 while (cp2 != cp)
2892 if (cp2->function == function)
2893 break;
2894 else
2895 cp2 += 1;
2896 if (cp2->function == function)
2897 continue;
2898
2899 function = DECL_MAIN_VARIANT (function);
2900
2901 /* Can't use alloca here, since result might be
2902 passed to calling function. */
2903 cp->h_len = parmlength;
2904 cp->harshness = (struct harshness_code *)
2905 oballoc ((parmlength + 1) * sizeof (struct harshness_code));
2906
2907 compute_conversion_costs (function, parms, cp, parmlength);
2908
2909 /* Make sure this is clear as well. */
2910 cp->h.int_penalty += template_cost;
2911
2912 if ((cp[0].h.code & EVIL_CODE) == 0)
2913 {
2914 cp[1].h.code = EVIL_CODE;
2915 cp++;
2916 }
2917 }
2918 }
2919
2920 if (cp - candidates)
2921 {
2922 tree rval = error_mark_node;
2923
2924 /* Leave marker. */
2925 cp[0].h.code = EVIL_CODE;
2926 if (cp - candidates > 1)
2927 {
2928 struct candidate *best_cp
2929 = ideal_candidate (NULL_TREE, candidates,
2930 cp - candidates, parms, parmlength);
2931 if (best_cp == (struct candidate *)0)
2932 {
2933 if (flags & LOOKUP_COMPLAIN)
2934 {
2935 cp_error ("call of overloaded `%D' is ambiguous", fnname);
2936 print_n_candidates (candidates, cp - candidates);
2937 }
2938 return error_mark_node;
2939 }
2940 else
2941 rval = best_cp->function;
2942 }
2943 else
2944 {
2945 cp -= 1;
2946 if (cp->h.code & EVIL_CODE)
2947 {
2948 if (flags & LOOKUP_COMPLAIN)
2949 error ("type conversion ambiguous");
2950 }
2951 else
2952 rval = cp->function;
2953 }
2954
2955 if (final_cp)
2956 return rval;
2957
2958 return buildxxx ? build_function_call_real (rval, parms, 0, flags)
2959 : build_function_call_real (rval, parms, 1, flags);
2960 }
2961
2962 if (flags & LOOKUP_SPECULATIVELY)
2963 return NULL_TREE;
2964
2965 if (flags & LOOKUP_COMPLAIN)
2966 report_type_mismatch (cp, parms, "function",
2967 decl_as_string (cp->function, 1));
2968
2969 return error_mark_node;
2970 }
2971
2972 tree
2973 build_overload_call (fnname, parms, flags, final_cp)
2974 tree fnname, parms;
2975 int flags;
2976 struct candidate *final_cp;
2977 {
2978 return build_overload_call_real (fnname, parms, flags, final_cp, 0);
2979 }
2980
2981 tree
2982 build_overload_call_maybe (fnname, parms, flags, final_cp)
2983 tree fnname, parms;
2984 int flags;
2985 struct candidate *final_cp;
2986 {
2987 return build_overload_call_real (fnname, parms, flags, final_cp, 1);
2988 }