54th Cygnus<->FSF merge
[gcc.git] / gcc / cp / call.c
1 /* Functions related to invoking methods and overloaded functions.
2 Copyright (C) 1987, 1992, 1993 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com) and
4 hacked by Brendan Kehoe (brendan@cygnus.com).
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21
22
23 /* High-level class interface. */
24
25 #include "config.h"
26 #include "tree.h"
27 #include <stdio.h>
28 #include "cp-tree.h"
29 #include "class.h"
30 #include "flags.h"
31
32 #include "obstack.h"
33 #define obstack_chunk_alloc xmalloc
34 #define obstack_chunk_free free
35
36 extern void sorry ();
37
38 extern int inhibit_warnings;
39 extern int flag_assume_nonnull_objects;
40 extern tree ctor_label, dtor_label;
41
42 /* From typeck.c: */
43 extern tree unary_complex_lvalue ();
44
45 /* Compute the ease with which a conversion can be performed
 46    between the expected type and the given type. */
47 static struct harshness_code convert_harshness ();
48
49 #define EVIL_RETURN(ARG) ((ARG).code = EVIL_CODE, (ARG))
50 #define STD_RETURN(ARG) ((ARG).code = STD_CODE, (ARG))
51 #define QUAL_RETURN(ARG) ((ARG).code = QUAL_CODE, (ARG))
52 #define TRIVIAL_RETURN(ARG) ((ARG).code = TRIVIAL_CODE, (ARG))
53 #define ZERO_RETURN(ARG) ((ARG).code = 0, (ARG))
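
/* A harshness code of zero means effectively no conversion is needed (the
   main variants already match).  The *_CODE bits (TRIVIAL, QUAL, PROMO,
   STD, USER, ELLIPSIS, EVIL) mark progressively less desirable
   conversions, so code words can roughly be compared as plain integers:
   rank_for_overload and ideal_candidate below rely on larger codes meaning
   worse matches, and can_convert accepts anything below USER_CODE.  */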
54
55 /* Ordering function for overload resolution. Compare two candidates
56 by gross quality. */
57 int
58 rank_for_overload (x, y)
59 struct candidate *x, *y;
60 {
61 if (y->h.code & (EVIL_CODE|ELLIPSIS_CODE|USER_CODE))
62 return y->h.code - x->h.code;
63 if (x->h.code & (EVIL_CODE|ELLIPSIS_CODE|USER_CODE))
64 return -1;
65
66 /* This is set by compute_conversion_costs, for calling a non-const
67 member function from a const member function. */
68 if ((y->harshness[0].code & CONST_CODE) ^ (x->harshness[0].code & CONST_CODE))
69 return y->harshness[0].code - x->harshness[0].code;
70
71 if (y->h.code & STD_CODE)
72 {
73 if (x->h.code & STD_CODE)
74 return y->h.distance - x->h.distance;
75 return 1;
76 }
77 if (x->h.code & STD_CODE)
78 return -1;
79
80 return y->h.code - x->h.code;
81 }
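
/* Since rank_for_overload sorts in decreasing order of harshness, qsorting
   with it leaves the worst candidates at the front of the array and the
   best candidate at the end; ideal_candidate below relies on this when it
   examines cp[-1].  */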
82
83 /* Compare two candidates, argument by argument. */
84 int
85 rank_for_ideal (x, y)
86 struct candidate *x, *y;
87 {
88 int i;
89
90 if (x->h_len != y->h_len)
91 abort ();
92
93 for (i = 0; i < x->h_len; i++)
94 {
95 if (y->harshness[i].code - x->harshness[i].code)
96 return y->harshness[i].code - x->harshness[i].code;
97 if ((y->harshness[i].code & STD_CODE)
98 && (y->harshness[i].distance - x->harshness[i].distance))
99 return y->harshness[i].distance - x->harshness[i].distance;
100
101 /* They're both the same code. Now see if we're dealing with an
102 integral promotion that needs a finer grain of accuracy. */
103 if (y->harshness[0].code & PROMO_CODE
104 && (y->harshness[i].int_penalty ^ x->harshness[i].int_penalty))
105 return y->harshness[i].int_penalty - x->harshness[i].int_penalty;
106 }
107 return 0;
108 }
109
 110 /* TYPE is the type we wish to convert to.  PARMTYPE is the type of the
 111    actual argument, and PARM is the argument itself (possibly NULL_TREE).
 112    We use a somewhat arbitrary cost function to measure this conversion. */
113 static struct harshness_code
114 convert_harshness (type, parmtype, parm)
115 register tree type, parmtype;
116 tree parm;
117 {
118 struct harshness_code h;
119 register enum tree_code codel;
120 register enum tree_code coder;
121 int lvalue;
122
123 h.code = 0;
124 h.distance = 0;
125 h.int_penalty = 0;
126
127 #ifdef GATHER_STATISTICS
128 n_convert_harshness++;
129 #endif
130
131 if (TYPE_PTRMEMFUNC_P (type))
132 type = TYPE_PTRMEMFUNC_FN_TYPE (type);
133 if (TYPE_PTRMEMFUNC_P (parmtype))
134 parmtype = TYPE_PTRMEMFUNC_FN_TYPE (parmtype);
135
136 if (TREE_CODE (parmtype) == REFERENCE_TYPE)
137 {
138 if (parm)
139 parm = convert_from_reference (parm);
140 parmtype = TREE_TYPE (parmtype);
141 lvalue = 1;
142 }
143 else if (parm)
144 lvalue = lvalue_p (parm);
145 else
146 lvalue = 0;
147
148 codel = TREE_CODE (type);
149 coder = TREE_CODE (parmtype);
150
151 if (TYPE_MAIN_VARIANT (parmtype) == TYPE_MAIN_VARIANT (type))
152 return ZERO_RETURN (h);
153
154 if (coder == ERROR_MARK)
155 return EVIL_RETURN (h);
156
157 if (codel == POINTER_TYPE && fntype_p (parmtype))
158 {
159 tree p1, p2;
160 struct harshness_code h1, h2;
161
162 /* Get to the METHOD_TYPE or FUNCTION_TYPE that this might be. */
163 type = TREE_TYPE (type);
164
165 if (coder == POINTER_TYPE)
166 {
167 parmtype = TREE_TYPE (parmtype);
168 coder = TREE_CODE (parmtype);
169 }
170
171 if (coder != TREE_CODE (type))
172 return EVIL_RETURN (h);
173
174 /* We allow the default conversion between function type
175 and pointer-to-function type for free. */
176 if (type == parmtype)
177 return ZERO_RETURN (h);
178
179 /* Compare return types. */
180 p1 = TREE_TYPE (type);
181 p2 = TREE_TYPE (parmtype);
182 h2 = convert_harshness (p1, p2, NULL_TREE);
183 if (h2.code & EVIL_CODE)
184 return h2;
185
186 h1.code = TRIVIAL_CODE;
187 h1.distance = 0;
188
189 if (h2.distance != 0)
190 {
191 tree binfo;
192
 193 	  /* This only works for pointers and references. */
194 if (TREE_CODE (p1) != POINTER_TYPE
195 && TREE_CODE (p1) != REFERENCE_TYPE)
196 return EVIL_RETURN (h);
197
198 p1 = TREE_TYPE (p1);
199 p2 = TREE_TYPE (p2);
200 /* Don't die if we happen to be dealing with void*. */
201 if (!IS_AGGR_TYPE (p1) || !IS_AGGR_TYPE (p2))
202 return EVIL_RETURN (h);
203 if (h2.distance < 0)
204 binfo = get_binfo (p2, p1, 0);
205 else
206 binfo = get_binfo (p1, p2, 0);
207
208 if (! BINFO_OFFSET_ZEROP (binfo))
209 {
210 static int explained = 0;
211 if (h2.distance < 0)
212 message_2_types (sorry, "cannot cast `%d' to `%d' at function call site", p2, p1);
213 else
214 message_2_types (sorry, "cannot cast `%d' to `%d' at function call site", p1, p2);
215
216 if (! explained++)
217 sorry ("(because pointer values change during conversion)");
218 return EVIL_RETURN (h);
219 }
220 }
221
222 h1.code |= h2.code;
223 if (h2.distance > h1.distance)
224 h1.distance = h2.distance;
225
226 p1 = TYPE_ARG_TYPES (type);
227 p2 = TYPE_ARG_TYPES (parmtype);
228 while (p1 && TREE_VALUE (p1) != void_type_node
229 && p2 && TREE_VALUE (p2) != void_type_node)
230 {
231 h2 = convert_harshness (TREE_VALUE (p1), TREE_VALUE (p2),
232 NULL_TREE);
233 if (h2.code & EVIL_CODE)
234 return h2;
235
236 if (h2.distance)
237 {
238 /* This only works for pointers and references. */
239 if (TREE_CODE (TREE_VALUE (p1)) != POINTER_TYPE
240 && TREE_CODE (TREE_VALUE (p1)) != REFERENCE_TYPE)
241 return EVIL_RETURN (h);
242 h2.distance = - h2.distance;
243 }
244
245 h1.code |= h2.code;
246 if (h2.distance > h1.distance)
247 h1.distance = h2.distance;
248 p1 = TREE_CHAIN (p1);
249 p2 = TREE_CHAIN (p2);
250 }
251 if (p1 == p2)
252 return h1;
253 if (p2)
254 {
255 if (p1)
256 return EVIL_RETURN (h);
257 h1.code |= ELLIPSIS_CODE;
258 return h1;
259 }
260 if (p1)
261 {
262 if (TREE_PURPOSE (p1) == NULL_TREE)
263 h1.code |= EVIL_CODE;
264 return h1;
265 }
266 }
267 else if (codel == POINTER_TYPE && coder == OFFSET_TYPE)
268 {
269 /* Get to the OFFSET_TYPE that this might be. */
270 type = TREE_TYPE (type);
271
272 if (coder != TREE_CODE (type))
273 return EVIL_RETURN (h);
274
275 if (TYPE_OFFSET_BASETYPE (type) == TYPE_OFFSET_BASETYPE (parmtype))
276 h.code = 0;
277 else if (UNIQUELY_DERIVED_FROM_P (TYPE_OFFSET_BASETYPE (type),
278 TYPE_OFFSET_BASETYPE (parmtype)))
279 {
280 h.code = STD_CODE;
281 h.distance = 1;
282 }
283 else if (UNIQUELY_DERIVED_FROM_P (TYPE_OFFSET_BASETYPE (parmtype),
284 TYPE_OFFSET_BASETYPE (type)))
285 {
286 h.code = STD_CODE;
287 h.distance = -1;
288 }
289 else
290 return EVIL_RETURN (h);
291 /* Now test the OFFSET_TYPE's target compatibility. */
292 type = TREE_TYPE (type);
293 parmtype = TREE_TYPE (parmtype);
294 }
295
296 if (coder == UNKNOWN_TYPE)
297 {
298 if (codel == FUNCTION_TYPE
299 || codel == METHOD_TYPE
300 || (codel == POINTER_TYPE
301 && (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
302 || TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)))
303 return TRIVIAL_RETURN (h);
304 return EVIL_RETURN (h);
305 }
306
307 if (coder == VOID_TYPE)
308 return EVIL_RETURN (h);
309
310 if (codel == BOOLEAN_TYPE)
311 {
312 if (INTEGRAL_CODE_P (coder) || coder == REAL_TYPE)
313 return STD_RETURN (h);
314 else if (coder == POINTER_TYPE || coder == OFFSET_TYPE)
315 {
316 /* Make this worse than any conversion to another pointer.
317 FIXME this is how I think the language should work, but it may not
318 end up being how the language is standardized (jason 1/30/95). */
319 h.distance = 32767;
320 return STD_RETURN (h);
321 }
322 return EVIL_RETURN (h);
323 }
324
325 if (INTEGRAL_CODE_P (codel))
326 {
 327       /* Control equivalence of ints and enums. */
328
329 if (codel == ENUMERAL_TYPE
330 && flag_int_enum_equivalence == 0)
331 {
332 /* Enums can be converted to ints, but not vice-versa. */
333 if (coder != ENUMERAL_TYPE
334 || TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (parmtype))
335 return EVIL_RETURN (h);
336 }
337
338 /* else enums and ints (almost) freely interconvert. */
339
340 if (INTEGRAL_CODE_P (coder))
341 {
342 if (TYPE_MAIN_VARIANT (type)
343 == TYPE_MAIN_VARIANT (type_promotes_to (parmtype)))
344 {
345 h.code = PROMO_CODE;
346 #if 0 /* What purpose does this serve? -jason */
347 /* A char, short, wchar_t, etc., should promote to an int if
348 it can handle it, otherwise to an unsigned. So we'll make
349 an unsigned. */
350 if (type != integer_type_node)
351 h.int_penalty = 1;
352 #endif
353 }
354 else
355 h.code = STD_CODE;
356
357 return h;
358 }
359 else if (coder == REAL_TYPE)
360 {
361 h.code = STD_CODE;
362 h.distance = 0;
363 return h;
364 }
365 }
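
  /* Illustration for the integral case above: against an `int' parameter,
     a `char' or `short' argument counts as an integral promotion
     (PROMO_CODE, since type_promotes_to yields `int'), while an `int'
     argument against a `long' parameter is only a standard conversion
     (STD_CODE).  */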
366
367 if (codel == REAL_TYPE)
368 {
369 if (coder == REAL_TYPE)
370 {
371 if (TYPE_MAIN_VARIANT (type)
372 == TYPE_MAIN_VARIANT (type_promotes_to (parmtype)))
373 h.code = PROMO_CODE;
374 else
375 h.code = STD_CODE;
376
377 return h;
378 }
379 else if (INTEGRAL_CODE_P (coder))
380 {
381 h.code = STD_CODE;
382 h.distance = 0;
383 return h;
384 }
385 }
386
387 /* Convert arrays which have not previously been converted. */
388 if (codel == ARRAY_TYPE)
389 codel = POINTER_TYPE;
390 if (coder == ARRAY_TYPE)
391 coder = POINTER_TYPE;
392
 393   /* Conversions among pointers.  */
394 if (codel == POINTER_TYPE && coder == POINTER_TYPE)
395 {
396 register tree ttl = TYPE_MAIN_VARIANT (TREE_TYPE (type));
397 register tree ttr = TYPE_MAIN_VARIANT (TREE_TYPE (parmtype));
398 int penalty = 4 * (ttl != ttr);
399
400 /* Anything converts to void *. Since this may be `const void *'
401 (etc.) use VOID_TYPE instead of void_type_node. Otherwise, the
402 targets must be the same, except that we do allow (at some cost)
403 conversion between signed and unsigned pointer types. */
404
405 if ((TREE_CODE (ttl) == METHOD_TYPE
406 || TREE_CODE (ttl) == FUNCTION_TYPE)
407 && TREE_CODE (ttl) == TREE_CODE (ttr))
408 {
409 if (comptypes (ttl, ttr, -1))
410 {
411 h.code = penalty ? STD_CODE : 0;
412 h.distance = 0;
413 }
414 else
415 h.code = EVIL_CODE;
416 return h;
417 }
418
419 #if 1
420 if (TREE_CODE (ttl) != VOID_TYPE
421 && (TREE_CODE (ttr) != VOID_TYPE || !parm || !integer_zerop (parm)))
422 {
423 if (TREE_UNSIGNED (ttl) != TREE_UNSIGNED (ttr))
424 {
425 ttl = unsigned_type (ttl);
426 ttr = unsigned_type (ttr);
427 penalty = 10;
428 }
429 if (comp_target_types (ttl, ttr, 0) <= 0)
430 return EVIL_RETURN (h);
431 }
432 #else
433 if (!(TREE_CODE (ttl) == VOID_TYPE
434 || TREE_CODE (ttr) == VOID_TYPE
435 || (TREE_UNSIGNED (ttl) ^ TREE_UNSIGNED (ttr)
436 && (ttl = unsigned_type (ttl),
437 ttr = unsigned_type (ttr),
438 penalty = 10, 0))
439 || (comp_target_types (ttl, ttr, 0) > 0)))
440 return EVIL_RETURN (h);
441 #endif
442
443 if (penalty == 10 || ttr == ttl)
444 {
445 tree tmp1 = TREE_TYPE (type), tmp2 = TREE_TYPE (parmtype);
446
447 /* If one was unsigned but the other wasn't, then we need to
448 do a standard conversion from T to unsigned T. */
449 if (penalty == 10)
450 h.code = PROMO_CODE; /* was STD_CODE */
451 else
452 h.code = 0;
453
454 /* Note conversion from `T*' to `const T*',
455 or `T*' to `volatile T*'. */
456 if (ttl == ttr
 457 	      && ((TYPE_READONLY (tmp1) != TYPE_READONLY (tmp2))
458 || (TYPE_VOLATILE (tmp1) != TYPE_VOLATILE (tmp2))))
459 h.code |= QUAL_CODE;
460
461 h.distance = 0;
462 return h;
463 }
464
465
466 if (TREE_CODE (ttl) == RECORD_TYPE && TREE_CODE (ttr) == RECORD_TYPE)
467 {
468 int b_or_d = get_base_distance (ttl, ttr, 0, 0);
469 if (b_or_d < 0)
470 {
471 b_or_d = get_base_distance (ttr, ttl, 0, 0);
472 if (b_or_d < 0)
473 return EVIL_RETURN (h);
474 h.distance = -b_or_d;
475 }
476 else
477 h.distance = b_or_d;
478 h.code = STD_CODE;
479 return h;
480 }
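
      /* For example (names illustrative): converting a `Derived *' argument
	 to a `Base *' parameter is STD_CODE with a positive distance (the
	 number of inheritance levels walked), while the reverse direction
	 gets a negative distance, which compute_conversion_costs counts as
	 a strike against the candidate.  */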
481
482 /* If converting from a `class*' to a `void*', make it
483 less favorable than any inheritance relationship. */
484 if (TREE_CODE (ttl) == VOID_TYPE && IS_AGGR_TYPE (ttr))
485 {
486 h.code = STD_CODE;
487 h.distance = CLASSTYPE_MAX_DEPTH (ttr)+1;
488 return h;
489 }
490 h.code = penalty ? STD_CODE : PROMO_CODE;
491 return h;
492 }
493
494 if (codel == POINTER_TYPE && coder == INTEGER_TYPE)
495 {
496 /* This is not a bad match, but don't let it beat
497 integer-enum combinations. */
498 if (parm && integer_zerop (parm))
499 {
500 h.code = STD_CODE;
501 h.distance = 0;
502 return h;
503 }
504 }
505
506 /* C++: Since the `this' parameter of a signature member function
507 is represented as a signature pointer to handle default implementations
508 correctly, we can have the case that `type' is a signature pointer
509 while `parmtype' is a pointer to a signature table. We don't really
510 do any conversions in this case, so just return 0. */
511
512 if (codel == RECORD_TYPE && coder == POINTER_TYPE
513 && IS_SIGNATURE_POINTER (type) && IS_SIGNATURE (TREE_TYPE (parmtype)))
514 return ZERO_RETURN (h);
515
516 if (codel == REFERENCE_TYPE)
517 {
518 tree ttl, ttr;
519 int constp = parm ? TREE_READONLY (parm) : TYPE_READONLY (parmtype);
520 int volatilep = (parm ? TREE_THIS_VOLATILE (parm)
521 : TYPE_VOLATILE (parmtype));
522 register tree intype = TYPE_MAIN_VARIANT (parmtype);
523 register enum tree_code form = TREE_CODE (intype);
524 int penalty = 0;
525
526 ttl = TREE_TYPE (type);
527
528 /* Only allow const reference binding if we were given a parm to deal
529 with, since it isn't really a conversion. This is a hack to
530 prevent build_type_conversion from finding this conversion, but
531 still allow overloading to find it. */
532 if (! lvalue && ! (parm && TYPE_READONLY (ttl)))
533 return EVIL_RETURN (h);
534
535 if (TYPE_READONLY (ttl) < constp
536 || TYPE_VOLATILE (ttl) < volatilep)
537 return EVIL_RETURN (h);
538
 539       /* When passing a non-const argument into a const reference, penalize
 540 	 it slightly, so that a non-const reference is preferred over this one. */
541 penalty = ((TYPE_READONLY (ttl) > constp)
542 + (TYPE_VOLATILE (ttl) > volatilep));
543
544 ttl = TYPE_MAIN_VARIANT (ttl);
545
546 if (form == OFFSET_TYPE)
547 {
548 intype = TREE_TYPE (intype);
549 form = TREE_CODE (intype);
550 }
551
552 ttr = intype;
553
554 /* Maybe handle conversion to base here? */
555
556 h = convert_harshness (ttl, ttr, NULL_TREE);
557 if (penalty && h.code == 0)
558 {
559 h.code = QUAL_CODE;
560 h.int_penalty = penalty;
561 }
562 return h;
563 }
564 if (codel == RECORD_TYPE && coder == RECORD_TYPE)
565 {
566 int b_or_d = get_base_distance (type, parmtype, 0, 0);
567 if (b_or_d < 0)
568 {
569 b_or_d = get_base_distance (parmtype, type, 0, 0);
570 if (b_or_d < 0)
571 return EVIL_RETURN (h);
572 h.distance = -b_or_d;
573 }
574 else
575 h.distance = b_or_d;
576 h.code = STD_CODE;
577 return h;
578 }
579 return EVIL_RETURN (h);
580 }
581
582 int
583 can_convert (to, from)
584 tree to, from;
585 {
586 struct harshness_code h;
587 h = convert_harshness (to, from, NULL_TREE);
588 return h.code < USER_CODE;
589 }
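
/* Illustration: can_convert (long_integer_type_node, integer_type_node)
   is nonzero, since converting int to long is at worst a standard
   conversion (STD_CODE), whereas anything needing a user-defined
   conversion or worse fails the h.code < USER_CODE test above.  */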
590
591 #ifdef DEBUG_MATCHING
592 static char *
593 print_harshness (h)
594 struct harshness_code *h;
595 {
596 static char buf[1024];
597 char tmp[1024];
598
599 bzero (buf, 1024 * sizeof (char));
600 strcat (buf, "codes=[");
601 if (h->code & EVIL_CODE)
602 strcat (buf, "EVIL");
603 if (h->code & CONST_CODE)
604 strcat (buf, " CONST");
605 if (h->code & ELLIPSIS_CODE)
606 strcat (buf, " ELLIPSIS");
607 if (h->code & USER_CODE)
608 strcat (buf, " USER");
609 if (h->code & STD_CODE)
610 strcat (buf, " STD");
611 if (h->code & PROMO_CODE)
612 strcat (buf, " PROMO");
613 if (h->code & QUAL_CODE)
614 strcat (buf, " QUAL");
615 if (h->code & TRIVIAL_CODE)
616 strcat (buf, " TRIVIAL");
617 if (buf[0] == '\0')
618 strcat (buf, "0");
619
620 sprintf (tmp, "] distance=%d int_penalty=%d", h->distance, h->int_penalty);
621
622 strcat (buf, tmp);
623
624 return buf;
625 }
626 #endif
627
628 /* Algorithm: For each argument, calculate how difficult it is to
629 make FUNCTION accept that argument. If we can easily tell that
630 FUNCTION won't be acceptable to one of the arguments, then we
631 don't need to compute the ease of converting the other arguments,
632 since it will never show up in the intersection of all arguments'
633 favorite functions.
634
635 Conversions between builtin and user-defined types are allowed, but
636 no function involving such a conversion is preferred to one which
637 does not require such a conversion. Furthermore, such conversions
638 must be unique. */
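
/* The function records its verdict in CP: each argument's individual
   harshness goes in cp->harshness[], the combined code bits go in cp->h,
   and the various "strikes" (evil, ellipsis, user) note why a candidate
   is disfavored; any surviving evil strike makes the whole candidate
   EVIL_CODE.  */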
639
640 void
641 compute_conversion_costs (function, tta_in, cp, arglen)
642 tree function;
643 tree tta_in;
644 struct candidate *cp;
645 int arglen;
646 {
647 tree ttf_in = TYPE_ARG_TYPES (TREE_TYPE (function));
648 tree ttf = ttf_in;
649 tree tta = tta_in;
650
 651   /* Start out with no strikes against this candidate. */
652 int evil_strikes = 0;
653 int ellipsis_strikes = 0;
654 int user_strikes = 0;
655 int b_or_d_strikes = 0;
656 int easy_strikes = 0;
657
658 int strike_index = 0, win;
659 struct harshness_code lose;
660 extern int cp_silent;
661
662 #ifdef GATHER_STATISTICS
663 n_compute_conversion_costs++;
664 #endif
665
666 #ifndef DEBUG_MATCHING
667 /* We don't emit any warnings or errors while trying out each candidate. */
668 cp_silent = 1;
669 #endif
670
671 cp->function = function;
672 cp->arg = tta ? TREE_VALUE (tta) : NULL_TREE;
673 cp->u.bad_arg = 0; /* optimistic! */
674
675 cp->h.code = 0;
676 cp->h.distance = 0;
677 cp->h.int_penalty = 0;
678 bzero ((char *) cp->harshness,
679 (cp->h_len + 1) * sizeof (struct harshness_code));
680
681 while (ttf && tta)
682 {
683 struct harshness_code h;
684
685 if (ttf == void_list_node)
686 break;
687
688 if (type_unknown_p (TREE_VALUE (tta)))
689 {
690 /* Must perform some instantiation here. */
691 tree rhs = TREE_VALUE (tta);
692 tree lhstype = TREE_VALUE (ttf);
693
694 /* Keep quiet about possible contravariance violations. */
695 int old_inhibit_warnings = inhibit_warnings;
696 inhibit_warnings = 1;
697
698 /* @@ This is to undo what `grokdeclarator' does to
699 parameter types. It really should go through
700 something more general. */
701
702 TREE_TYPE (tta) = unknown_type_node;
703 rhs = instantiate_type (lhstype, rhs, 0);
704 inhibit_warnings = old_inhibit_warnings;
705
706 if (TREE_CODE (rhs) == ERROR_MARK)
707 h.code = EVIL_CODE;
708 else
709 h = convert_harshness (lhstype, TREE_TYPE (rhs), rhs);
710 }
711 else
712 {
713 #ifdef DEBUG_MATCHING
714 static tree old_function = NULL_TREE;
715
716 if (!old_function || function != old_function)
717 {
718 cp_error ("trying %D", function);
719 old_function = function;
720 }
721
722 cp_error (" doing (%T) %E against arg %T",
723 TREE_TYPE (TREE_VALUE (tta)), TREE_VALUE (tta),
724 TREE_VALUE (ttf));
725 #endif
726
727 h = convert_harshness (TREE_VALUE (ttf),
728 TREE_TYPE (TREE_VALUE (tta)),
729 TREE_VALUE (tta));
730
731 #ifdef DEBUG_MATCHING
732 cp_error (" evaluated %s", print_harshness (&h));
733 #endif
734 }
735
736 cp->harshness[strike_index] = h;
737 if ((h.code & EVIL_CODE)
738 || ((h.code & STD_CODE) && h.distance < 0))
739 {
740 cp->u.bad_arg = strike_index;
741 evil_strikes = 1;
742 }
743 else if (h.code & ELLIPSIS_CODE)
744 ellipsis_strikes += 1;
745 #if 0
746 /* This is never set by `convert_harshness'. */
747 else if (h.code & USER_CODE)
748 {
749 user_strikes += 1;
750 }
751 #endif
752 else
753 {
754 if ((h.code & STD_CODE) && h.distance)
755 {
756 if (h.distance > b_or_d_strikes)
757 b_or_d_strikes = h.distance;
758 }
759 else
760 easy_strikes += (h.code & (STD_CODE|PROMO_CODE|TRIVIAL_CODE));
761 cp->h.code |= h.code;
762 /* Make sure we communicate this. */
763 cp->h.int_penalty += h.int_penalty;
764 }
765
766 ttf = TREE_CHAIN (ttf);
767 tta = TREE_CHAIN (tta);
768 strike_index += 1;
769 }
770
771 if (tta)
772 {
 773       /* Ran out of formals, and the parameter list is of fixed size. */
774 if (ttf /* == void_type_node */)
775 {
776 cp->h.code = EVIL_CODE;
777 cp->u.bad_arg = -1;
778 cp_silent = 0;
779 return;
780 }
781 else
782 {
783 struct harshness_code h;
784 int l = list_length (tta);
785 ellipsis_strikes += l;
786 h.code = ELLIPSIS_CODE;
787 h.distance = 0;
788 h.int_penalty = 0;
789 for (; l; --l)
790 cp->harshness[strike_index++] = h;
791 }
792 }
793 else if (ttf && ttf != void_list_node)
794 {
 795       /* Ran out of actuals, and there are no defaults. */
796 if (TREE_PURPOSE (ttf) == NULL_TREE)
797 {
798 cp->h.code = EVIL_CODE;
799 cp->u.bad_arg = -2;
800 cp_silent = 0;
801 return;
802 }
803 /* Store index of first default. */
804 cp->harshness[arglen].distance = strike_index+1;
805 }
806 else
807 cp->harshness[arglen].distance = 0;
808
 809   /* Argument list lengths work out, so we don't need to check them again. */
810 if (evil_strikes)
811 {
812 /* We do not check for derived->base conversions here, since in
813 no case would they give evil strike counts, unless such conversions
814 are somehow ambiguous. */
815
816 /* See if any user-defined conversions apply.
817 But make sure that we do not loop. */
818 static int dont_convert_types = 0;
819
820 if (dont_convert_types)
821 {
822 cp->h.code = EVIL_CODE;
823 cp_silent = 0;
824 return;
825 }
826
827 win = 0; /* Only get one chance to win. */
828 ttf = TYPE_ARG_TYPES (TREE_TYPE (function));
829 tta = tta_in;
830 strike_index = 0;
831 evil_strikes = 0;
832
833 while (ttf && tta)
834 {
835 if (ttf == void_list_node)
836 break;
837
838 lose = cp->harshness[strike_index];
839 if ((lose.code & EVIL_CODE)
840 || ((lose.code & STD_CODE) && lose.distance < 0))
841 {
842 tree actual_type = TREE_TYPE (TREE_VALUE (tta));
843 tree formal_type = TREE_VALUE (ttf);
844 int extra_conversions = 0;
845
846 dont_convert_types = 1;
847
848 if (TREE_CODE (formal_type) == REFERENCE_TYPE)
849 formal_type = TREE_TYPE (formal_type);
850 if (TREE_CODE (actual_type) == REFERENCE_TYPE)
851 actual_type = TREE_TYPE (actual_type);
852
853 if (formal_type != error_mark_node
854 && actual_type != error_mark_node)
855 {
856 formal_type = TYPE_MAIN_VARIANT (formal_type);
857 actual_type = TYPE_MAIN_VARIANT (actual_type);
858
859 if (TYPE_HAS_CONSTRUCTOR (formal_type))
860 {
861 /* If it has a constructor for this type,
862 try to use it. */
863 /* @@ There is no way to save this result yet, so
864 success is a NULL_TREE for now. */
865 if (convert_to_aggr (formal_type, TREE_VALUE (tta), 0, 1)
866 != error_mark_node)
867 win++;
868 }
869 if (TYPE_LANG_SPECIFIC (actual_type)
870 && TYPE_HAS_CONVERSION (actual_type))
871 {
872 tree conv;
873 /* Don't issue warnings since we're only groping
 874 			 around for the right answer; we haven't yet
875 committed to going with this solution. */
876 int old_inhibit_warnings = inhibit_warnings;
877
878 inhibit_warnings = 1;
879 conv = build_type_conversion
880 (CALL_EXPR, TREE_VALUE (ttf), TREE_VALUE (tta), 0);
881 inhibit_warnings = old_inhibit_warnings;
882
883 if (conv)
884 {
885 if (conv == error_mark_node)
886 win += 2;
887 else
888 {
889 win++;
890 if (TREE_CODE (conv) != CALL_EXPR)
891 extra_conversions = 1;
892 }
893 }
894 else if (TREE_CODE (TREE_VALUE (ttf)) == REFERENCE_TYPE)
895 {
896 conv = build_type_conversion (CALL_EXPR, formal_type,
897 TREE_VALUE (tta), 0);
898 if (conv)
899 {
900 if (conv == error_mark_node)
901 win += 2;
902 else
903 {
904 win++;
905 if (TREE_CODE (conv) != CALL_EXPR)
906 extra_conversions = 1;
907 }
908 }
909 }
910 }
911 }
912 dont_convert_types = 0;
913
914 if (win == 1)
915 {
916 user_strikes += 1;
917 cp->harshness[strike_index].code
918 = USER_CODE | (extra_conversions ? STD_CODE : 0);
919 win = 0;
920 }
921 else
922 {
923 if (cp->u.bad_arg > strike_index)
924 cp->u.bad_arg = strike_index;
925
926 evil_strikes = win ? 2 : 1;
927 break;
928 }
929 }
930
931 ttf = TREE_CHAIN (ttf);
932 tta = TREE_CHAIN (tta);
933 strike_index += 1;
934 }
935 }
936
937 /* Const member functions get a small penalty because defaulting
938 to const is less useful than defaulting to non-const. */
939 /* This is bogus, it does not correspond to anything in the ARM.
940 This code will be fixed when this entire section is rewritten
941 to conform to the ARM. (mrs) */
942 if (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE)
943 {
944 tree this_parm = TREE_VALUE (ttf_in);
945
946 if (TREE_CODE (this_parm) == RECORD_TYPE /* Is `this' a sig ptr? */
947 ? TYPE_READONLY (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (this_parm))))
948 : TYPE_READONLY (TREE_TYPE (this_parm)))
949 {
950 cp->harshness[0].code |= TRIVIAL_CODE;
951 ++easy_strikes;
952 }
953 else
954 {
955 /* Calling a non-const member function from a const member function
956 is probably invalid, but for now we let it only draw a warning.
957 We indicate that such a mismatch has occurred by setting the
958 harshness to a maximum value. */
959 if (TREE_CODE (TREE_TYPE (TREE_VALUE (tta_in))) == POINTER_TYPE
960 && (TYPE_READONLY (TREE_TYPE (TREE_TYPE (TREE_VALUE (tta_in))))))
961 cp->harshness[0].code |= CONST_CODE;
962 }
963 }
964
965 if (evil_strikes)
966 cp->h.code = EVIL_CODE;
967 if (ellipsis_strikes)
968 cp->h.code |= ELLIPSIS_CODE;
969 if (user_strikes)
970 cp->h.code |= USER_CODE;
971 cp_silent = 0;
972 #ifdef DEBUG_MATCHING
973 cp_error ("final eval %s", print_harshness (&cp->h));
974 #endif
975 }
976
977 /* Subroutine of ideal_candidate. See if X or Y is a better match
978 than the other. */
979 static int
980 strictly_better (x, y)
981 unsigned short x, y;
982 {
983 unsigned short xor;
984
985 if (x == y)
986 return 0;
987
988 xor = x ^ y;
989 if (xor >= x || xor >= y)
990 return 1;
991 return 0;
992 }
993
994 /* When one of several possible overloaded functions and/or methods
995 can be called, choose the best candidate for overloading.
996
997 BASETYPE is the context from which we start method resolution
998 or NULL if we are comparing overloaded functions.
999 CANDIDATES is the array of candidates we have to choose from.
1000 N_CANDIDATES is the length of CANDIDATES.
1001 PARMS is a TREE_LIST of parameters to the function we'll ultimately
1002 choose. It is modified in place when resolving methods. It is not
1003 modified in place when resolving overloaded functions.
1004 LEN is the length of the parameter list. */
1005
1006 static struct candidate *
1007 ideal_candidate (basetype, candidates, n_candidates, parms, len)
1008 tree basetype;
1009 struct candidate *candidates;
1010 int n_candidates;
1011 tree parms;
1012 int len;
1013 {
1014 struct candidate *cp = candidates+n_candidates;
1015 int i, j = -1, best_code;
1016
1017 /* For each argument, sort the functions from best to worst for the arg.
1018 For each function that's not best for this arg, set its overall
1019 harshness to EVIL so that other args won't like it. The candidate
1020 list for the last argument is the intersection of all the best-liked
1021 functions. */
1022
1023 #if 0
1024 for (i = 0; i < len; i++)
1025 {
1026 qsort (candidates, n_candidates, sizeof (struct candidate),
1027 rank_for_overload);
1028 best_code = cp[-1].h.code;
1029
1030 /* To find out functions that are worse than that represented
1031 by BEST_CODE, we can't just do a comparison like h.code>best_code.
1032 The total harshness for the "best" fn may be 8|8 for two args, and
1033 the harshness for the next-best may be 8|2. If we just compared,
1034 that would be checking 8>10, which would lead to the next-best
1035 being disqualified. What we actually want to do is get rid
1036 of functions that are definitely worse than that represented
1037 by best_code, i.e. those which have bits set higher than the
1038 highest in best_code. Sooooo, what we do is clear out everything
1039 represented by best_code, and see if we still come up with something
1040 higher. If so (e.g., 8|8 vs 8|16), it'll disqualify it properly. */
1041 for (j = n_candidates-2; j >= 0; j--)
1042 if ((candidates[j].h.code & ~best_code) > best_code)
1043 candidates[j].h.code = EVIL_CODE;
1044 }
1045
1046 if (cp[-1].h.code & EVIL_CODE)
1047 return NULL;
1048 #else
1049 qsort (candidates, n_candidates, sizeof (struct candidate),
1050 rank_for_overload);
1051 best_code = cp[-1].h.code;
1052 #endif
1053
1054 /* If they're at least as good as each other, do an arg-by-arg check. */
1055 if (! strictly_better (cp[-1].h.code, cp[-2].h.code))
1056 {
1057 int better = 0;
1058 int worse = 0;
1059
1060 for (j = 0; j < n_candidates; j++)
1061 if (! strictly_better (candidates[j].h.code, best_code))
1062 break;
1063
1064 qsort (candidates+j, n_candidates-j, sizeof (struct candidate),
1065 rank_for_ideal);
1066 for (i = 0; i < len; i++)
1067 {
1068 if (cp[-1].harshness[i].code < cp[-2].harshness[i].code)
1069 better = 1;
1070 else if (cp[-1].harshness[i].code > cp[-2].harshness[i].code)
1071 worse = 1;
1072 else if (cp[-1].harshness[i].code & STD_CODE)
1073 {
1074 /* If it involves a standard conversion, let the
1075 inheritance lattice be the final arbiter. */
1076 if (cp[-1].harshness[i].distance > cp[-2].harshness[i].distance)
1077 worse = 1;
1078 else if (cp[-1].harshness[i].distance < cp[-2].harshness[i].distance)
1079 better = 1;
1080 }
1081 else if (cp[-1].harshness[i].code & PROMO_CODE)
1082 {
1083 /* For integral promotions, take into account a finer
1084 granularity for determining which types should be favored
1085 over others in such promotions. */
1086 if (cp[-1].harshness[i].int_penalty > cp[-2].harshness[i].int_penalty)
1087 worse = 1;
1088 else if (cp[-1].harshness[i].int_penalty < cp[-2].harshness[i].int_penalty)
1089 better = 1;
1090 }
1091 }
1092
1093 if (! better || worse)
1094 return NULL;
1095 }
1096 return cp-1;
1097 }
1098
1099 /* Assume that if the class referred to is not in the
 1100    current class hierarchy, it may be remote.
1101 PARENT is assumed to be of aggregate type here. */
1102 static int
1103 may_be_remote (parent)
1104 tree parent;
1105 {
1106 if (TYPE_OVERLOADS_METHOD_CALL_EXPR (parent) == 0)
1107 return 0;
1108
1109 if (current_class_type == NULL_TREE)
1110 return 0;
1111
1112 if (parent == current_class_type)
1113 return 0;
1114
1115 if (UNIQUELY_DERIVED_FROM_P (parent, current_class_type))
1116 return 0;
1117 return 1;
1118 }
1119
1120 tree
1121 build_vfield_ref (datum, type)
1122 tree datum, type;
1123 {
1124 tree rval;
1125 int old_assume_nonnull_objects = flag_assume_nonnull_objects;
1126
1127 if (datum == error_mark_node)
1128 return error_mark_node;
1129
1130 /* Vtable references are always made from non-null objects. */
1131 flag_assume_nonnull_objects = 1;
1132 if (TREE_CODE (TREE_TYPE (datum)) == REFERENCE_TYPE)
1133 datum = convert_from_reference (datum);
1134
1135 if (! TYPE_USES_COMPLEX_INHERITANCE (type))
1136 rval = build (COMPONENT_REF, TREE_TYPE (CLASSTYPE_VFIELD (type)),
1137 datum, CLASSTYPE_VFIELD (type));
1138 else
1139 rval = build_component_ref (datum, DECL_NAME (CLASSTYPE_VFIELD (type)), 0, 0);
1140 flag_assume_nonnull_objects = old_assume_nonnull_objects;
1141
1142 return rval;
1143 }
1144
1145 /* Build a call to a member of an object. I.e., one that overloads
1146 operator ()(), or is a pointer-to-function or pointer-to-method. */
1147 static tree
1148 build_field_call (basetype_path, instance_ptr, name, parms)
1149 tree basetype_path, instance_ptr, name, parms;
1150 {
1151 tree field, instance;
1152
1153 if (instance_ptr == current_class_decl)
1154 {
1155 /* Check to see if we really have a reference to an instance variable
1156 with `operator()()' overloaded. */
1157 field = IDENTIFIER_CLASS_VALUE (name);
1158
1159 if (field == NULL_TREE)
1160 {
1161 cp_error ("`this' has no member named `%D'", name);
1162 return error_mark_node;
1163 }
1164
1165 if (TREE_CODE (field) == FIELD_DECL)
1166 {
1167 /* If it's a field, try overloading operator (),
1168 or calling if the field is a pointer-to-function. */
1169 instance = build_component_ref_1 (C_C_D, field, 0);
1170 if (instance == error_mark_node)
1171 return error_mark_node;
1172
1173 if (TYPE_LANG_SPECIFIC (TREE_TYPE (instance))
1174 && TYPE_OVERLOADS_CALL_EXPR (TREE_TYPE (instance)))
1175 return build_opfncall (CALL_EXPR, LOOKUP_NORMAL, instance, parms, NULL_TREE);
1176
1177 if (TREE_CODE (TREE_TYPE (instance)) == POINTER_TYPE)
1178 {
1179 if (TREE_CODE (TREE_TYPE (TREE_TYPE (instance))) == FUNCTION_TYPE)
1180 return build_function_call (instance, parms);
1181 else if (TREE_CODE (TREE_TYPE (TREE_TYPE (instance))) == METHOD_TYPE)
1182 return build_function_call (instance, tree_cons (NULL_TREE, current_class_decl, parms));
1183 }
1184 }
1185 return NULL_TREE;
1186 }
1187
 1188   /* Check to see whether this is really a reference to an instance variable
1189 with `operator()()' overloaded. */
1190 field = lookup_field (basetype_path, name, 1, 0);
1191
1192 /* This can happen if the reference was ambiguous or for access
1193 violations. */
1194 if (field == error_mark_node)
1195 return error_mark_node;
1196
1197 if (field)
1198 {
1199 tree basetype;
1200 tree ftype = TREE_TYPE (field);
1201
1202 if (TREE_CODE (ftype) == REFERENCE_TYPE)
1203 ftype = TREE_TYPE (ftype);
1204
1205 if (TYPE_LANG_SPECIFIC (ftype) && TYPE_OVERLOADS_CALL_EXPR (ftype))
1206 {
1207 /* Make the next search for this field very short. */
1208 basetype = DECL_FIELD_CONTEXT (field);
1209 instance_ptr = convert_pointer_to (basetype, instance_ptr);
1210
1211 instance = build_indirect_ref (instance_ptr, NULL_PTR);
1212 return build_opfncall (CALL_EXPR, LOOKUP_NORMAL,
1213 build_component_ref_1 (instance, field, 0),
1214 parms, NULL_TREE);
1215 }
1216 if (TREE_CODE (ftype) == POINTER_TYPE)
1217 {
1218 if (TREE_CODE (TREE_TYPE (ftype)) == FUNCTION_TYPE
1219 || TREE_CODE (TREE_TYPE (ftype)) == METHOD_TYPE)
1220 {
1221 /* This is a member which is a pointer to function. */
1222 tree ref
1223 = build_component_ref_1 (build_indirect_ref (instance_ptr,
1224 NULL_PTR),
1225 field, LOOKUP_COMPLAIN);
1226 if (ref == error_mark_node)
1227 return error_mark_node;
1228 return build_function_call (ref, parms);
1229 }
1230 }
1231 else if (TREE_CODE (ftype) == METHOD_TYPE)
1232 {
1233 error ("invalid call via pointer-to-member function");
1234 return error_mark_node;
1235 }
1236 else
1237 return NULL_TREE;
1238 }
1239 return NULL_TREE;
1240 }
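
/* Illustrative example: if class A declares a data member `F f;' whose
   type F overloads `operator ()', then `a.f (3)' is routed through the
   TYPE_OVERLOADS_CALL_EXPR branch above via build_opfncall; a member that
   is a pointer to function, e.g. `int (*fp) ();', makes `a.fp ()' an
   ordinary call through the pointer instead.  */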
1241
1242 tree
1243 find_scoped_type (type, inner_name, inner_types)
1244 tree type, inner_name, inner_types;
1245 {
1246 tree tags = CLASSTYPE_TAGS (type);
1247
1248 while (tags)
1249 {
1250 /* The TREE_PURPOSE of an enum tag (which becomes a member of the
1251 enclosing class) is set to the name for the enum type. So, if
1252 inner_name is `bar', and we strike `baz' for `enum bar { baz }',
1253 then this test will be true. */
1254 if (TREE_PURPOSE (tags) == inner_name)
1255 {
1256 if (inner_types == NULL_TREE)
1257 return DECL_NESTED_TYPENAME (TYPE_NAME (TREE_VALUE (tags)));
1258 return resolve_scope_to_name (TREE_VALUE (tags), inner_types);
1259 }
1260 tags = TREE_CHAIN (tags);
1261 }
1262
1263 #if 0
1264 /* XXX This needs to be fixed better. */
1265 if (TREE_CODE (type) == UNINSTANTIATED_P_TYPE)
1266 {
1267 sorry ("nested class lookup in template type");
1268 return NULL_TREE;
1269 }
1270 #endif
1271
1272 /* Look for a TYPE_DECL. */
1273 for (tags = TYPE_FIELDS (type); tags; tags = TREE_CHAIN (tags))
1274 if (TREE_CODE (tags) == TYPE_DECL && DECL_NAME (tags) == inner_name)
1275 {
1276 /* Code by raeburn. */
1277 if (inner_types == NULL_TREE)
1278 return DECL_NESTED_TYPENAME (tags);
1279 return resolve_scope_to_name (TREE_TYPE (tags), inner_types);
1280 }
1281
1282 return NULL_TREE;
1283 }
1284
1285 /* Resolve an expression NAME1::NAME2::...::NAMEn to
 1286    the name that names the above nested type.  INNER_STUFF
1287 is a chain of nested type names (held together by SCOPE_REFs);
 1288    OUTER_TYPE is the type we know to enclose INNER_STUFF.
1289 Returns NULL_TREE if there is an error. */
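
/* For example (names illustrative), resolving `A::B::E' starts with
   OUTER_TYPE == NULL_TREE and INNER_STUFF a SCOPE_REF chain; successive
   recursive calls then peel off one level at a time, using
   find_scoped_type above to look up member classes and enums.  */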
1290 tree
1291 resolve_scope_to_name (outer_type, inner_stuff)
1292 tree outer_type, inner_stuff;
1293 {
1294 register tree tmp;
1295 tree inner_name, inner_type;
1296
1297 if (outer_type == NULL_TREE && current_class_type != NULL_TREE)
1298 {
1299 /* We first try to look for a nesting in our current class context,
1300 then try any enclosing classes. */
1301 tree type = current_class_type;
1302
1303 while (type && (TREE_CODE (type) == RECORD_TYPE
1304 || TREE_CODE (type) == UNION_TYPE))
1305 {
1306 tree rval = resolve_scope_to_name (type, inner_stuff);
1307
1308 if (rval != NULL_TREE)
1309 return rval;
1310 type = DECL_CONTEXT (TYPE_NAME (type));
1311 }
1312 }
1313
1314 if (TREE_CODE (inner_stuff) == SCOPE_REF)
1315 {
1316 inner_name = TREE_OPERAND (inner_stuff, 0);
1317 inner_type = TREE_OPERAND (inner_stuff, 1);
1318 }
1319 else
1320 {
1321 inner_name = inner_stuff;
1322 inner_type = NULL_TREE;
1323 }
1324
1325 if (outer_type == NULL_TREE)
1326 {
1327 /* If we have something that's already a type by itself,
1328 use that. */
1329 if (IDENTIFIER_HAS_TYPE_VALUE (inner_name))
1330 {
1331 if (inner_type)
1332 return resolve_scope_to_name (IDENTIFIER_TYPE_VALUE (inner_name),
1333 inner_type);
1334 return inner_name;
1335 }
1336 return NULL_TREE;
1337 }
1338
1339 if (! IS_AGGR_TYPE (outer_type))
1340 return NULL_TREE;
1341
1342 /* Look for member classes or enums. */
1343 tmp = find_scoped_type (outer_type, inner_name, inner_type);
1344
1345 /* If it's not a type in this class, then go down into the
1346 base classes and search there. */
1347 if (! tmp && TYPE_BINFO (outer_type))
1348 {
1349 tree binfos = TYPE_BINFO_BASETYPES (outer_type);
1350 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
1351
1352 for (i = 0; i < n_baselinks; i++)
1353 {
1354 tree base_binfo = TREE_VEC_ELT (binfos, i);
1355 tmp = resolve_scope_to_name (BINFO_TYPE (base_binfo), inner_stuff);
1356 if (tmp)
1357 return tmp;
1358 }
1359 tmp = NULL_TREE;
1360 }
1361
1362 return tmp;
1363 }
1364
1365 /* Build a method call of the form `EXP->SCOPES::NAME (PARMS)'.
1366 This is how virtual function calls are avoided. */
1367 tree
1368 build_scoped_method_call (exp, scopes, name, parms)
1369 tree exp, scopes, name, parms;
1370 {
1371 /* Because this syntactic form does not allow
1372 a pointer to a base class to be `stolen',
1373 we need not protect the derived->base conversion
1374 that happens here.
1375
1376 @@ But we do have to check access privileges later. */
1377 tree basename = resolve_scope_to_name (NULL_TREE, scopes);
1378 tree basetype, binfo, decl;
1379 tree type = TREE_TYPE (exp);
1380
1381 if (type == error_mark_node
1382 || basename == NULL_TREE)
1383 return error_mark_node;
1384
1385 basetype = IDENTIFIER_TYPE_VALUE (basename);
1386
1387 if (TREE_CODE (type) == REFERENCE_TYPE)
1388 type = TREE_TYPE (type);
1389
 1390   /* Destructors can be "called" for simple types; see 5.2.4 and 12.4.  Note
1391 that explicit ~int is caught in the parser; this deals with typedefs
1392 and template parms. */
1393 if (TREE_CODE (name) == BIT_NOT_EXPR && ! is_aggr_typedef (basename, 0))
1394 {
1395 if (type != basetype)
1396 cp_error ("type of `%E' does not match destructor type `%T' (type was `%T')",
1397 exp, basetype, type);
1398 name = TREE_OPERAND (name, 0);
1399 if (basetype != get_type_value (name))
1400 cp_error ("qualified type `%T' does not match destructor name `~%T'",
1401 basetype, name);
1402 return convert (void_type_node, exp);
1403 }
1404
1405 if (! is_aggr_typedef (basename, 1))
1406 return error_mark_node;
1407
1408 if (! IS_AGGR_TYPE (type))
1409 {
1410 cp_error ("base object `%E' of scoped method call is of non-aggregate type `%T'",
1411 exp, type);
1412 return error_mark_node;
1413 }
1414
1415 if ((binfo = binfo_or_else (basetype, type)))
1416 {
1417 if (binfo == error_mark_node)
1418 return error_mark_node;
1419 if (TREE_CODE (exp) == INDIRECT_REF)
1420 decl = build_indirect_ref (convert_pointer_to (binfo,
1421 build_unary_op (ADDR_EXPR, exp, 0)), NULL_PTR);
1422 else
1423 decl = build_scoped_ref (exp, scopes);
1424
1425 /* Call to a destructor. */
1426 if (TREE_CODE (name) == BIT_NOT_EXPR)
1427 {
1428 /* Explicit call to destructor. */
1429 name = TREE_OPERAND (name, 0);
1430 if (! (name == constructor_name (TREE_TYPE (decl))
1431 || TREE_TYPE (decl) == get_type_value (name)))
1432 {
1433 cp_error
1434 ("qualified type `%T' does not match destructor name `~%T'",
1435 TREE_TYPE (decl), name);
1436 return error_mark_node;
1437 }
1438 if (! TYPE_HAS_DESTRUCTOR (TREE_TYPE (decl)))
1439 return convert (void_type_node, exp);
1440
1441 return build_delete (TREE_TYPE (decl), decl, integer_two_node,
1442 LOOKUP_NORMAL|LOOKUP_NONVIRTUAL|LOOKUP_DESTRUCTOR,
1443 0);
1444 }
1445
1446 /* Call to a method. */
1447 return build_method_call (decl, name, parms, binfo,
1448 LOOKUP_NORMAL|LOOKUP_NONVIRTUAL);
1449 }
1450 return error_mark_node;
1451 }
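
/* So a qualified call such as `p->Base::f (args)' (names illustrative)
   resolves `f' statically: build_method_call is reached with
   LOOKUP_NONVIRTUAL set, bypassing the virtual function table.  */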
1452
1453 static void
1454 print_candidates (candidates)
1455 tree candidates;
1456 {
1457 cp_error_at ("candidates are: %D", TREE_VALUE (candidates));
1458 candidates = TREE_CHAIN (candidates);
1459
1460 while (candidates)
1461 {
1462 cp_error_at (" %D", TREE_VALUE (candidates));
1463 candidates = TREE_CHAIN (candidates);
1464 }
1465 }
1466
1467 static void
1468 print_n_candidates (candidates, n)
1469 struct candidate *candidates;
1470 int n;
1471 {
1472 int i;
1473
1474 cp_error_at ("candidates are: %D", candidates[0].function);
1475 for (i = 1; i < n; i++)
1476 cp_error_at (" %D", candidates[i].function);
1477 }
1478
1479 /* Build something of the form ptr->method (args)
1480 or object.method (args). This can also build
1481 calls to constructors, and find friends.
1482
1483 Member functions always take their class variable
1484 as a pointer.
1485
1486 INSTANCE is a class instance.
1487
1488 NAME is the name of the method desired, usually an IDENTIFIER_NODE.
1489
1490 PARMS help to figure out what that NAME really refers to.
1491
1492 BASETYPE_PATH, if non-NULL, contains a chain from the type of INSTANCE
1493 down to the real instance type to use for access checking. We need this
1494 information to get protected accesses correct. This parameter is used
1495 by build_member_call.
1496
1497 FLAGS is the logical disjunction of zero or more LOOKUP_
1498 flags. See cp-tree.h for more info.
1499
1500 If this is all OK, calls build_function_call with the resolved
1501 member function.
1502
1503 This function must also handle being called to perform
1504 initialization, promotion/coercion of arguments, and
1505 instantiation of default parameters.
1506
1507 Note that NAME may refer to an instance variable name. If
1508 `operator()()' is defined for the type of that field, then we return
1509 that result. */
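
/* For example (names illustrative), `obj.f (x)' arrives with INSTANCE == obj,
   NAME == f and PARMS holding x, whereas a constructor invocation such as
   `X (x)' arrives with INSTANCE == NULL_TREE and NAME identifying the
   class X.  */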
1510 tree
1511 build_method_call (instance, name, parms, basetype_path, flags)
1512 tree instance, name, parms, basetype_path;
1513 int flags;
1514 {
1515 register tree function, fntype, value_type;
1516 register tree basetype, save_basetype;
1517 register tree baselink, result, method_name, parmtypes, parm;
1518 tree last;
1519 int pass;
1520 enum access_type access = access_public;
1521
1522 /* Range of cases for vtable optimization. */
1523 enum vtable_needs { not_needed, maybe_needed, unneeded, needed };
1524 enum vtable_needs need_vtbl = not_needed;
1525
1526 char *name_kind;
1527 int ever_seen = 0;
1528 tree instance_ptr = NULL_TREE;
1529 int all_virtual = flag_all_virtual;
1530 int static_call_context = 0;
1531 tree found_fns = NULL_TREE;
1532
1533 /* Keep track of `const' and `volatile' objects. */
1534 int constp, volatilep;
1535
1536 #ifdef GATHER_STATISTICS
1537 n_build_method_call++;
1538 #endif
1539
1540 if (instance == error_mark_node
1541 || name == error_mark_node
1542 || parms == error_mark_node
1543 || (instance != NULL_TREE && TREE_TYPE (instance) == error_mark_node))
1544 return error_mark_node;
1545
1546 /* This is the logic that magically deletes the second argument to
1547 operator delete, if it is not needed. */
1548 if (name == ansi_opname[(int) DELETE_EXPR] && list_length (parms)==2)
1549 {
1550 tree save_last = TREE_CHAIN (parms);
1551 tree result;
 1552       /* Get rid of the unneeded argument.  */
1553 TREE_CHAIN (parms) = NULL_TREE;
1554 result = build_method_call (instance, name, parms, basetype_path,
1555 (LOOKUP_SPECULATIVELY|flags)
1556 &~LOOKUP_COMPLAIN);
1557 /* If it finds a match, return it. */
1558 if (result)
1559 return build_method_call (instance, name, parms, basetype_path, flags);
 1560       /* If it doesn't work, the two-argument delete must work.  */
1561 TREE_CHAIN (parms) = save_last;
1562 }
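  /* Thus, when the class provides only `operator delete (void *)', the
     unneeded size argument is dropped here; if no one-argument form can
     be found, the original two arguments are restored and used.  */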
1563 /* We already know whether it's needed or not for vec delete. */
1564 else if (name == ansi_opname[(int) VEC_DELETE_EXPR]
1565 && ! TYPE_VEC_DELETE_TAKES_SIZE (TREE_TYPE (instance)))
1566 TREE_CHAIN (parms) = NULL_TREE;
1567
1568 if (TREE_CODE (name) == BIT_NOT_EXPR)
1569 {
1570 flags |= LOOKUP_DESTRUCTOR;
1571 name = TREE_OPERAND (name, 0);
1572 if (parms)
1573 error ("destructors take no parameters");
1574 basetype = TREE_TYPE (instance);
1575 if (TREE_CODE (basetype) == REFERENCE_TYPE)
1576 basetype = TREE_TYPE (basetype);
1577 if (! ((IS_AGGR_TYPE (basetype)
1578 && name == constructor_name (basetype))
1579 || basetype == get_type_value (name)))
1580 {
1581 cp_error ("destructor name `~%D' does not match type `%T' of expression",
1582 name, basetype);
1583 return convert (void_type_node, instance);
1584 }
1585
1586 if (! TYPE_HAS_DESTRUCTOR (basetype))
1587 return convert (void_type_node, instance);
1588 instance = default_conversion (instance);
1589 instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
1590 return build_delete (build_pointer_type (basetype),
1591 instance_ptr, integer_two_node,
1592 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0);
1593 }
1594
1595 {
1596 char *xref_name;
1597
1598 /* Initialize name for error reporting. */
1599 if (IDENTIFIER_OPNAME_P (name) && ! IDENTIFIER_TYPENAME_P (name))
1600 {
1601 char *p = operator_name_string (name);
1602 xref_name = (char *)alloca (strlen (p) + 10);
1603 sprintf (xref_name, "operator %s", p);
1604 }
1605 else if (TREE_CODE (name) == SCOPE_REF)
1606 xref_name = IDENTIFIER_POINTER (TREE_OPERAND (name, 1));
1607 else
1608 xref_name = IDENTIFIER_POINTER (name);
1609
1610 GNU_xref_call (current_function_decl, xref_name);
1611 }
1612
1613 if (instance == NULL_TREE)
1614 {
1615 basetype = NULL_TREE;
1616 /* Check cases where this is really a call to raise
1617 an exception. */
1618 if (current_class_type && TREE_CODE (name) == IDENTIFIER_NODE)
1619 {
1620 basetype = purpose_member (name, CLASSTYPE_TAGS (current_class_type));
1621 if (basetype)
1622 basetype = TREE_VALUE (basetype);
1623 }
1624 else if (TREE_CODE (name) == SCOPE_REF
1625 && TREE_CODE (TREE_OPERAND (name, 0)) == IDENTIFIER_NODE)
1626 {
1627 if (! is_aggr_typedef (TREE_OPERAND (name, 0), 1))
1628 return error_mark_node;
1629 basetype = purpose_member (TREE_OPERAND (name, 1),
1630 CLASSTYPE_TAGS (IDENTIFIER_TYPE_VALUE (TREE_OPERAND (name, 0))));
1631 if (basetype)
1632 basetype = TREE_VALUE (basetype);
1633 }
1634
1635 if (basetype != NULL_TREE)
1636 ;
1637 /* call to a constructor... */
1638 else if (basetype_path)
1639 basetype = BINFO_TYPE (basetype_path);
1640 else if (IDENTIFIER_HAS_TYPE_VALUE (name))
1641 {
1642 basetype = IDENTIFIER_TYPE_VALUE (name);
1643 name = constructor_name_full (basetype);
1644 }
1645 else
1646 {
1647 tree typedef_name = lookup_name (name, 1);
1648 if (typedef_name && TREE_CODE (typedef_name) == TYPE_DECL)
1649 {
1650 /* Canonicalize the typedef name. */
1651 basetype = TREE_TYPE (typedef_name);
1652 name = TYPE_IDENTIFIER (basetype);
1653 }
1654 else
1655 {
1656 cp_error ("no constructor named `%T' in scope",
1657 name);
1658 return error_mark_node;
1659 }
1660 }
1661
1662 if (! IS_AGGR_TYPE (basetype))
1663 {
1664 non_aggr_error:
1665 if ((flags & LOOKUP_COMPLAIN) && TREE_CODE (basetype) != ERROR_MARK)
1666 cp_error ("request for member `%D' in `%E', which is of non-aggregate type `%T'",
1667 name, instance, basetype);
1668
1669 return error_mark_node;
1670 }
1671 }
1672 else if (instance == C_C_D || instance == current_class_decl)
1673 {
1674 /* When doing initialization, we side-effect the TREE_TYPE of
1675 C_C_D, hence we cannot set up BASETYPE from CURRENT_CLASS_TYPE. */
1676 basetype = TREE_TYPE (C_C_D);
1677
1678 /* Anything manifestly `this' in constructors and destructors
1679 has a known type, so virtual function tables are not needed. */
1680 if (TYPE_VIRTUAL_P (basetype)
1681 && !(flags & LOOKUP_NONVIRTUAL))
1682 need_vtbl = (dtor_label || ctor_label)
1683 ? unneeded : maybe_needed;
1684
1685 instance = C_C_D;
1686 instance_ptr = current_class_decl;
1687 result = build_field_call (TYPE_BINFO (current_class_type),
1688 instance_ptr, name, parms);
1689
1690 if (result)
1691 return result;
1692 }
1693 else if (TREE_CODE (instance) == RESULT_DECL)
1694 {
1695 basetype = TREE_TYPE (instance);
1696 /* Should we ever have to make a virtual function reference
1697 from a RESULT_DECL, know that it must be of fixed type
1698 within the scope of this function. */
1699 if (!(flags & LOOKUP_NONVIRTUAL) && TYPE_VIRTUAL_P (basetype))
1700 need_vtbl = maybe_needed;
1701 instance_ptr = build1 (ADDR_EXPR, TYPE_POINTER_TO (basetype), instance);
1702 }
1703 else
1704 {
1705 /* The MAIN_VARIANT of the type that `instance_ptr' winds up being. */
1706 tree inst_ptr_basetype;
1707
1708 static_call_context =
1709 (TREE_CODE (instance) == INDIRECT_REF
1710 && TREE_CODE (TREE_OPERAND (instance, 0)) == NOP_EXPR
1711 && TREE_OPERAND (TREE_OPERAND (instance, 0), 0) == error_mark_node);
1712
1713 if (TREE_CODE (instance) == OFFSET_REF)
1714 instance = resolve_offset_ref (instance);
1715
1716 /* the base type of an instance variable is pointer to class */
1717 basetype = TREE_TYPE (instance);
1718
1719 if (TREE_CODE (basetype) == REFERENCE_TYPE)
1720 {
1721 basetype = TREE_TYPE (basetype);
1722 if (! IS_AGGR_TYPE (basetype))
1723 goto non_aggr_error;
1724 /* Call to convert not needed because we are remaining
1725 within the same type. */
1726 instance_ptr = build1 (NOP_EXPR, build_pointer_type (basetype),
1727 instance);
1728 inst_ptr_basetype = TYPE_MAIN_VARIANT (basetype);
1729 }
1730 else
1731 {
1732 if (! IS_AGGR_TYPE (basetype))
1733 goto non_aggr_error;
1734
1735 /* If `instance' is a signature pointer/reference and `name' is
1736 not a constructor, we are calling a signature member function.
1737 In that case set the `basetype' to the signature type. */
1738 if ((IS_SIGNATURE_POINTER (basetype)
1739 || IS_SIGNATURE_REFERENCE (basetype))
1740 && TYPE_IDENTIFIER (basetype) != name)
1741 basetype = SIGNATURE_TYPE (basetype);
1742
1743 if ((IS_SIGNATURE (basetype)
1744 && (instance_ptr = build_optr_ref (instance)))
1745 || (lvalue_p (instance)
1746 && (instance_ptr = build_unary_op (ADDR_EXPR, instance, 0)))
1747 || (instance_ptr = unary_complex_lvalue (ADDR_EXPR, instance)))
1748 {
1749 if (instance_ptr == error_mark_node)
1750 return error_mark_node;
1751 }
1752 else if (TREE_CODE (instance) == NOP_EXPR
1753 || TREE_CODE (instance) == CONSTRUCTOR)
1754 {
1755 /* A cast is not an lvalue. Initialize a fresh temp
1756 with the value we are casting from, and proceed with
1757 that temporary. We can't cast to a reference type,
1758 so that simplifies the initialization to something
1759 we can manage. */
1760 tree temp = get_temp_name (TREE_TYPE (instance), 0);
1761 if (IS_AGGR_TYPE (TREE_TYPE (instance)))
1762 expand_aggr_init (temp, instance, 0, flags);
1763 else
1764 {
1765 store_init_value (temp, instance);
1766 expand_decl_init (temp);
1767 }
1768 instance = temp;
1769 instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
1770 }
1771 else
1772 {
1773 if (TREE_CODE (instance) != CALL_EXPR
1774 #ifdef PCC_STATIC_STRUCT_RETURN
1775 && TREE_CODE (instance) != RTL_EXPR
1776 #endif
1777 )
1778 my_friendly_abort (125);
1779 if (TYPE_NEEDS_CONSTRUCTING (basetype))
1780 instance = build_cplus_new (basetype, instance, 0);
1781 else
1782 {
1783 instance = get_temp_name (basetype, 0);
1784 TREE_ADDRESSABLE (instance) = 1;
1785 }
1786 instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
1787 }
1788 /* @@ Should we call comp_target_types here? */
1789 inst_ptr_basetype = TREE_TYPE (TREE_TYPE (instance_ptr));
1790 if (TYPE_MAIN_VARIANT (basetype) == TYPE_MAIN_VARIANT (inst_ptr_basetype))
1791 basetype = inst_ptr_basetype;
1792 else
1793 {
1794 instance_ptr = convert (TYPE_POINTER_TO (basetype), instance_ptr);
1795 if (instance_ptr == error_mark_node)
1796 return error_mark_node;
1797 }
1798 }
1799
1800 /* After converting `instance_ptr' above, `inst_ptr_basetype' was
1801 not updated, so we use `basetype' instead. */
1802 if (basetype_path == NULL_TREE
1803 && IS_SIGNATURE (basetype))
1804 basetype_path = TYPE_BINFO (basetype);
1805 else if (basetype_path == NULL_TREE ||
1806 BINFO_TYPE (basetype_path) != TYPE_MAIN_VARIANT (inst_ptr_basetype))
1807 basetype_path = TYPE_BINFO (inst_ptr_basetype);
1808
1809 result = build_field_call (basetype_path, instance_ptr, name, parms);
1810 if (result)
1811 return result;
1812
1813 if (!(flags & LOOKUP_NONVIRTUAL) && TYPE_VIRTUAL_P (basetype))
1814 {
1815 if (TREE_SIDE_EFFECTS (instance_ptr))
1816 {
1817 /* This action is needed because the instance is needed
1818 for providing the base of the virtual function table.
1819 Without using a SAVE_EXPR, the function we are building
1820 may be called twice, or side effects on the instance
1821 variable (such as a post-increment), may happen twice. */
1822 instance_ptr = save_expr (instance_ptr);
1823 instance = build_indirect_ref (instance_ptr, NULL_PTR);
1824 }
1825 else if (TREE_CODE (TREE_TYPE (instance)) == POINTER_TYPE)
1826 {
1827 /* This happens when called for operator new (). */
1828 instance = build_indirect_ref (instance, NULL_PTR);
1829 }
1830
1831 need_vtbl = maybe_needed;
1832 }
1833 }
1834
1835 if (TYPE_SIZE (basetype) == 0)
1836 {
1837 /* This is worth complaining about, I think. */
1838 cp_error ("cannot lookup method in incomplete type `%T'", basetype);
1839 return error_mark_node;
1840 }
1841
1842 save_basetype = TYPE_MAIN_VARIANT (basetype);
1843
1844 #if 0
1845 if (all_virtual == 1
1846 && (! strncmp (IDENTIFIER_POINTER (name), OPERATOR_METHOD_FORMAT,
1847 OPERATOR_METHOD_LENGTH)
1848 || instance_ptr == NULL_TREE
1849 || (TYPE_OVERLOADS_METHOD_CALL_EXPR (basetype) == 0)))
1850 all_virtual = 0;
1851 #endif
1852
1853 last = NULL_TREE;
1854 for (parmtypes = NULL_TREE, parm = parms; parm; parm = TREE_CHAIN (parm))
1855 {
1856 tree t = TREE_TYPE (TREE_VALUE (parm));
1857 if (TREE_CODE (t) == OFFSET_TYPE)
1858 {
1859 /* Convert OFFSET_TYPE entities to their normal selves. */
1860 TREE_VALUE (parm) = resolve_offset_ref (TREE_VALUE (parm));
1861 t = TREE_TYPE (TREE_VALUE (parm));
1862 }
1863 if (TREE_CODE (TREE_VALUE (parm)) == OFFSET_REF
1864 && TREE_CODE (t) == METHOD_TYPE)
1865 {
1866 TREE_VALUE (parm) = build_unary_op (ADDR_EXPR, TREE_VALUE (parm), 0);
1867 }
1868 #if 0
1869 /* This breaks reference-to-array parameters. */
1870 if (TREE_CODE (t) == ARRAY_TYPE)
1871 {
1872 /* Perform the conversion from ARRAY_TYPE to POINTER_TYPE in place.
1873 This eliminates needless calls to `compute_conversion_costs'. */
1874 TREE_VALUE (parm) = default_conversion (TREE_VALUE (parm));
1875 t = TREE_TYPE (TREE_VALUE (parm));
1876 }
1877 #endif
1878 if (t == error_mark_node)
1879 return error_mark_node;
1880 last = build_tree_list (NULL_TREE, t);
1881 parmtypes = chainon (parmtypes, last);
1882 }
1883
1884 if (instance)
1885 {
1886 /* TREE_READONLY (instance) fails for references. */
1887 constp = TYPE_READONLY (TREE_TYPE (TREE_TYPE (instance_ptr)));
1888 volatilep = TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (instance_ptr)));
1889 parms = tree_cons (NULL_TREE, instance_ptr, parms);
1890 }
1891 else
1892 {
1893 /* Raw constructors are always in charge. */
1894 if (TYPE_USES_VIRTUAL_BASECLASSES (basetype)
1895 && ! (flags & LOOKUP_HAS_IN_CHARGE))
1896 {
1897 flags |= LOOKUP_HAS_IN_CHARGE;
1898 parms = tree_cons (NULL_TREE, integer_one_node, parms);
1899 parmtypes = tree_cons (NULL_TREE, integer_type_node, parmtypes);
1900 }
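/* Sketch of the convention assumed here (not spelled out in the
   original comment): when a class has virtual base classes, its
   constructors take an extra leading "in-charge" flag.  Passing
   integer_one_node means this call constructs the complete object,
   so the constructor also initializes the virtual bases; the
   initialization-only constructor calls built further down pass
   integer_zero_node instead.  */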
1901
1902 if (flag_this_is_variable > 0)
1903 {
1904 constp = 0;
1905 volatilep = 0;
1906 instance_ptr = build_int_2 (0, 0);
1907 TREE_TYPE (instance_ptr) = TYPE_POINTER_TO (basetype);
1908 parms = tree_cons (NULL_TREE, instance_ptr, parms);
1909 }
1910 else
1911 {
1912 constp = 0;
1913 volatilep = 0;
1914 instance_ptr = build_new (NULL_TREE, basetype, void_type_node, 0);
1915 if (instance_ptr == error_mark_node)
1916 return error_mark_node;
1917 instance_ptr = save_expr (instance_ptr);
1918 TREE_CALLS_NEW (instance_ptr) = 1;
1919 instance = build_indirect_ref (instance_ptr, NULL_PTR);
1920
1921 #if 0
1922 /* This breaks initialization of a reference from a new
1923 expression of a different type. And it doesn't appear to
1924 serve its original purpose any more, either. jason 10/12/94 */
1925 /* If it's a default argument initialized from a ctor, what we get
1926 from instance_ptr will match the arglist for the FUNCTION_DECL
1927 of the constructor. */
1928 if (parms && TREE_CODE (TREE_VALUE (parms)) == CALL_EXPR
1929 && TREE_OPERAND (TREE_VALUE (parms), 1)
1930 && TREE_CALLS_NEW (TREE_VALUE (TREE_OPERAND (TREE_VALUE (parms), 1))))
1931 parms = build_tree_list (NULL_TREE, instance_ptr);
1932 else
1933 #endif
1934 parms = tree_cons (NULL_TREE, instance_ptr, parms);
1935 }
1936 }
1937
1938 parmtypes = tree_cons (NULL_TREE, TREE_TYPE (instance_ptr), parmtypes);
1939
1940 if (last == NULL_TREE)
1941 last = parmtypes;
1942
1943 /* Look up function name in the structure type definition. */
1944
1945 if ((IDENTIFIER_HAS_TYPE_VALUE (name)
1946 && ! IDENTIFIER_OPNAME_P (name)
1947 && IS_AGGR_TYPE (IDENTIFIER_TYPE_VALUE (name))
1948 && TREE_CODE (IDENTIFIER_TYPE_VALUE (name)) != UNINSTANTIATED_P_TYPE)
1949 || name == constructor_name (basetype))
1950 {
1951 tree tmp = NULL_TREE;
1952 if (IDENTIFIER_TYPE_VALUE (name) == basetype
1953 || name == constructor_name (basetype))
1954 tmp = TYPE_BINFO (basetype);
1955 else
1956 tmp = get_binfo (IDENTIFIER_TYPE_VALUE (name), basetype, 0);
1957
1958 if (tmp != NULL_TREE)
1959 {
1960 name_kind = "constructor";
1961
1962 if (TYPE_USES_VIRTUAL_BASECLASSES (basetype)
1963 && ! (flags & LOOKUP_HAS_IN_CHARGE))
1964 {
1965 /* Constructors called for initialization
1966 only are never in charge. */
1967 tree tmplist;
1968
1969 flags |= LOOKUP_HAS_IN_CHARGE;
1970 tmplist = tree_cons (NULL_TREE, integer_zero_node,
1971 TREE_CHAIN (parms));
1972 TREE_CHAIN (parms) = tmplist;
1973 tmplist = tree_cons (NULL_TREE, integer_type_node, TREE_CHAIN (parmtypes));
1974 TREE_CHAIN (parmtypes) = tmplist;
1975 }
1976 basetype = BINFO_TYPE (tmp);
1977 }
1978 else
1979 name_kind = "method";
1980 }
1981 else
1982 name_kind = "method";
1983
1984 if (basetype_path == NULL_TREE
1985 || BINFO_TYPE (basetype_path) != TYPE_MAIN_VARIANT (basetype))
1986 basetype_path = TYPE_BINFO (basetype);
1987 result = lookup_fnfields (basetype_path, name,
1988 (flags & LOOKUP_COMPLAIN));
1989 if (result == error_mark_node)
1990 return error_mark_node;
1991
1992
1993 #if 0
1994 /* Now, go look for this method name. We do not find destructors here.
1995
1996 Putting `void_list_node' on the end of the parmtypes
1997 fakes out `build_decl_overload' into doing the right thing. */
1998 TREE_CHAIN (last) = void_list_node;
1999 method_name = build_decl_overload (name, parmtypes,
2000 1 + (name == constructor_name (save_basetype)
2001 || name == constructor_name_full (save_basetype)));
2002 TREE_CHAIN (last) = NULL_TREE;
2003 #endif
2004
2005 for (pass = 0; pass < 2; pass++)
2006 {
2007 struct candidate *candidates;
2008 struct candidate *cp;
2009 int len;
2010 unsigned best = 1;
2011
2012 /* This increments every time we go up the type hierarchy.
2013 The idea is to prefer a function of the derived class if possible. */
2014 int b_or_d = 0;
2015
2016 baselink = result;
2017
2018 if (pass > 0)
2019 {
2020 candidates
2021 = (struct candidate *) alloca ((ever_seen+1)
2022 * sizeof (struct candidate));
2023 bzero ((char *) candidates, (ever_seen + 1) * sizeof (struct candidate));
2024 cp = candidates;
2025 len = list_length (parms);
2026 ever_seen = 0;
2027
2028 /* First see if a global function has a shot at it. */
2029 if (flags & LOOKUP_GLOBAL)
2030 {
2031 tree friend_parms;
2032 tree parm = instance_ptr;
2033
2034 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE)
2035 {
2036 /* TREE_VALUE (parms) may have been modified by now;
2037 restore it to its original value. */
2038 TREE_VALUE (parms) = parm;
2039 friend_parms = parms;
2040 }
2041 else if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2042 {
2043 tree new_type;
2044 parm = build_indirect_ref (parm, "friendifying parms (compiler error)");
2045 new_type = cp_build_type_variant (TREE_TYPE (parm), constp,
2046 volatilep);
2047 new_type = build_reference_type (new_type);
2048 parm = convert (new_type, parm);
2049 friend_parms = tree_cons (NULL_TREE, parm, TREE_CHAIN (parms));
2050 }
2051 else
2052 my_friendly_abort (167);
2053
2054 cp->h_len = len;
2055 cp->harshness = (struct harshness_code *)
2056 alloca ((len + 1) * sizeof (struct harshness_code));
2057
2058 result = build_overload_call (name, friend_parms, 0, cp);
2059 /* If it turns out to be the one we were actually looking for
2060 (it was probably a friend function), then return the
2061 good result. */
2062 if (TREE_CODE (result) == CALL_EXPR)
2063 return result;
2064
2065 while ((cp->h.code & EVIL_CODE) == 0)
2066 {
2067 /* non-standard uses: set the field to 0 to indicate
2068 we are using a non-member function. */
2069 cp->u.field = 0;
2070 if (cp->harshness[len].distance == 0
2071 && cp->h.code < best)
2072 best = cp->h.code;
2073 cp += 1;
2074 }
2075 }
2076 }
2077
2078 while (baselink)
2079 {
2080 /* We have a hit (of sorts). If the parameter list is
2081 "error_mark_node", or some variant thereof, it won't
2082 match any methods. Since we have verified that there is
2083 some method vaguely matching this one (in name at least),
2084 silently return.
2085
2086 Don't stop for friends, however. */
2087 basetype_path = TREE_PURPOSE (baselink);
2088
2089 function = TREE_VALUE (baselink);
2090 if (TREE_CODE (basetype_path) == TREE_LIST)
2091 basetype_path = TREE_VALUE (basetype_path);
2092 basetype = BINFO_TYPE (basetype_path);
2093
2094 #if 0
2095 /* Cast the instance variable if necessary. */
2096 if (basetype != TYPE_MAIN_VARIANT
2097 (TREE_TYPE (TREE_TYPE (TREE_VALUE (parms)))))
2098 {
2099 if (basetype == save_basetype)
2100 TREE_VALUE (parms) = instance_ptr;
2101 else
2102 {
2103 tree type = build_pointer_type
2104 (build_type_variant (basetype, constp, volatilep));
2105 TREE_VALUE (parms) = convert_force (type, instance_ptr, 0);
2106 }
2107 }
2108
2109 /* FIXME: this is the wrong place to get an error. Hopefully
2110 the access-control rewrite will handle this more cleanly. */
2111 if (TREE_VALUE (parms) == error_mark_node)
2112 return error_mark_node;
2113 #endif
2114
2115 if (DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (function)))
2116 function = DECL_CHAIN (function);
2117
2118 for (; function; function = DECL_CHAIN (function))
2119 {
2120 #ifdef GATHER_STATISTICS
2121 n_inner_fields_searched++;
2122 #endif
2123 ever_seen++;
2124 if (pass > 0)
2125 found_fns = tree_cons (NULL_TREE, function, found_fns);
2126
2127 /* Not looking for friends here. */
2128 if (TREE_CODE (TREE_TYPE (function)) == FUNCTION_TYPE
2129 && ! DECL_STATIC_FUNCTION_P (function))
2130 continue;
2131
2132 #if 0
2133 if (pass == 0
2134 && DECL_ASSEMBLER_NAME (function) == method_name)
2135 goto found;
2136 #endif
2137
2138 if (pass > 0)
2139 {
2140 tree these_parms = parms;
2141
2142 #ifdef GATHER_STATISTICS
2143 n_inner_fields_searched++;
2144 #endif
2145 cp->h_len = len;
2146 cp->harshness = (struct harshness_code *)
2147 alloca ((len + 1) * sizeof (struct harshness_code));
2148
2149 if (DECL_STATIC_FUNCTION_P (function))
2150 these_parms = TREE_CHAIN (these_parms);
2151 compute_conversion_costs (function, these_parms, cp, len);
2152
2153 if ((cp->h.code & EVIL_CODE) == 0)
2154 {
2155 cp->u.field = function;
2156 cp->function = function;
2157 cp->basetypes = basetype_path;
2158
2159 /* Don't allow non-converting constructors to convert. */
2160 if (flags & LOOKUP_ONLYCONVERTING
2161 && DECL_LANG_SPECIFIC (function)
2162 && DECL_NONCONVERTING_P (function))
2163 continue;
2164
2165 /* No "two-level" conversions. */
2166 if (flags & LOOKUP_NO_CONVERSION
2167 && (cp->h.code & USER_CODE))
2168 continue;
2169
2170 cp++;
2171 }
2172 }
2173 }
2174 /* Now we have run through one link's member functions;
2175 arrange to head-insert this link's links. */
2176 baselink = next_baselink (baselink);
2177 b_or_d += 1;
2178 /* Don't grab functions from base classes. lookup_fnfield will
2179 do the work to get us down into the right place. */
2180 baselink = NULL_TREE;
2181 }
2182 if (pass == 0)
2183 {
2184 tree igv = lookup_name_nonclass (name);
2185
2186 /* No exact match could be found. Now try to find a match
2187 using default conversions. */
2188 if ((flags & LOOKUP_GLOBAL) && igv)
2189 {
2190 if (TREE_CODE (igv) == FUNCTION_DECL)
2191 ever_seen += 1;
2192 else if (TREE_CODE (igv) == TREE_LIST)
2193 ever_seen += count_functions (igv);
2194 }
2195
2196 if (ever_seen == 0)
2197 {
2198 if ((flags & (LOOKUP_SPECULATIVELY|LOOKUP_COMPLAIN))
2199 == LOOKUP_SPECULATIVELY)
2200 return NULL_TREE;
2201
2202 TREE_CHAIN (last) = void_list_node;
2203 if (flags & LOOKUP_GLOBAL)
2204 cp_error ("no global or member function `%D(%A)' defined",
2205 name, parmtypes);
2206 else
2207 cp_error ("no member function `%T::%D(%A)' defined",
2208 save_basetype, name, TREE_CHAIN (parmtypes));
2209 return error_mark_node;
2210 }
2211 continue;
2212 }
2213
2214 if (cp - candidates != 0)
2215 {
2216 /* Rank from worst to best. Then cp will point to the best one.
2217 Private fields have their bits flipped. For unsigned
2218 numbers, this should make them look very large.
2219 If the best alternate has a (signed) negative value,
2220 then all we ever saw were private members. */
2221 if (cp - candidates > 1)
2222 {
2223 int n_candidates = cp - candidates;
2224 extern int warn_synth;
2225 TREE_VALUE (parms) = instance_ptr;
2226 cp = ideal_candidate (save_basetype, candidates,
2227 n_candidates, parms, len);
2228 if (cp == (struct candidate *)0)
2229 {
2230 if (flags & LOOKUP_COMPLAIN)
2231 {
2232 TREE_CHAIN (last) = void_list_node;
2233 cp_error ("call of overloaded %s `%D(%A)' is ambiguous",
2234 name_kind, name, TREE_CHAIN (parmtypes));
2235 print_n_candidates (candidates, n_candidates);
2236 }
2237 return error_mark_node;
2238 }
2239 if (cp->h.code & EVIL_CODE)
2240 return error_mark_node;
2241 if (warn_synth
2242 && DECL_NAME (cp->function) == ansi_opname[MODIFY_EXPR]
2243 && DECL_ARTIFICIAL (cp->function)
2244 && n_candidates == 2)
2245 {
2246 cp_warning ("using synthesized `%#D' for copy assignment",
2247 cp->function);
2248 cp_warning_at (" where cfront would use `%#D'",
2249 candidates->function);
2250 }
2251 }
2252 else if (cp[-1].h.code & EVIL_CODE)
2253 {
2254 if (flags & LOOKUP_COMPLAIN)
2255 cp_error ("ambiguous type conversion requested for %s `%D'",
2256 name_kind, name);
2257 return error_mark_node;
2258 }
2259 else
2260 cp--;
2261
2262 /* The global function was the best, so use it. */
2263 if (cp->u.field == 0)
2264 {
2265 /* We must convert the instance pointer into a reference type.
2266 Global overloaded functions can only either take
2267 aggregate objects (which come for free from references)
2268 or reference data types anyway. */
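/* Illustrative (an assumption, not from the original sources): for
   `a == b' where the best candidate found was a global
   `operator== (X &, X &)' rather than a member, the implicit first
   argument `&a' gathered above is rewritten here as a reference so
   that it matches the global function's signature.  */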
2269 TREE_VALUE (parms) = copy_node (instance_ptr);
2270 TREE_TYPE (TREE_VALUE (parms)) = build_reference_type (TREE_TYPE (TREE_TYPE (instance_ptr)));
2271 return build_function_call (cp->function, parms);
2272 }
2273
2274 function = cp->function;
2275 basetype_path = cp->basetypes;
2276 if (! DECL_STATIC_FUNCTION_P (function))
2277 TREE_VALUE (parms) = cp->arg;
2278 goto found_and_maybe_warn;
2279 }
2280
2281 if (flags & (LOOKUP_COMPLAIN|LOOKUP_SPECULATIVELY))
2282 {
2283 if ((flags & (LOOKUP_SPECULATIVELY|LOOKUP_COMPLAIN))
2284 == LOOKUP_SPECULATIVELY)
2285 return NULL_TREE;
2286
2287 if (DECL_STATIC_FUNCTION_P (cp->function))
2288 parms = TREE_CHAIN (parms);
2289 if (ever_seen)
2290 {
2291 if (flags & LOOKUP_SPECULATIVELY)
2292 return NULL_TREE;
2293 if (static_call_context
2294 && TREE_CODE (TREE_TYPE (cp->function)) == METHOD_TYPE)
2295 cp_error ("object missing in call to `%D'", cp->function);
2296 else if (ever_seen > 1)
2297 {
2298 TREE_CHAIN (last) = void_list_node;
2299 cp_error ("no matching function for call to `%T::%D (%A)'",
2300 TREE_TYPE (TREE_TYPE (instance_ptr)),
2301 name, TREE_CHAIN (parmtypes));
2302 TREE_CHAIN (last) = NULL_TREE;
2303 print_candidates (found_fns);
2304 }
2305 else
2306 report_type_mismatch (cp, parms, name_kind);
2307 return error_mark_node;
2308 }
2309
2310 if ((flags & (LOOKUP_SPECULATIVELY|LOOKUP_COMPLAIN))
2311 == LOOKUP_COMPLAIN)
2312 {
2313 cp_error ("%T has no method named %D", save_basetype, name);
2314 return error_mark_node;
2315 }
2316 return NULL_TREE;
2317 }
2318 continue;
2319
2320 found_and_maybe_warn:
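/* Illustrative case for the check below (an assumption, not from
   the original sources):
	struct S { void f (); };
	const S s;
	s.f ();    // non-const member function, const object
   lands here with CONST_CODE set for the `this' argument.  */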
2321 if ((cp->harshness[0].code & CONST_CODE)
2322 /* 12.1p2: Constructors can be called for const objects. */
2323 && ! DECL_CONSTRUCTOR_P (cp->function))
2324 {
2325 if (flags & LOOKUP_COMPLAIN)
2326 {
2327 cp_error_at ("non-const member function `%D'", cp->function);
2328 error ("called for const object at this point in file");
2329 }
2330 /* Not good enough for a match. */
2331 else
2332 return error_mark_node;
2333 }
2334 goto found;
2335 }
2336 /* Silently return error_mark_node. */
2337 return error_mark_node;
2338
2339 found:
2340 if (flags & LOOKUP_PROTECT)
2341 access = compute_access (basetype_path, function);
2342
2343 if (access == access_private)
2344 {
2345 if (flags & LOOKUP_COMPLAIN)
2346 {
2347 cp_error_at ("%s `%+#D' is %s", name_kind, function,
2348 TREE_PRIVATE (function) ? "private"
2349 : "from private base class");
2350 error ("within this context");
2351 }
2352 return error_mark_node;
2353 }
2354 else if (access == access_protected)
2355 {
2356 if (flags & LOOKUP_COMPLAIN)
2357 {
2358 cp_error_at ("%s `%+#D' %s", name_kind, function,
2359 TREE_PROTECTED (function) ? "is protected"
2360 : "has protected accessibility");
2361 error ("within this context");
2362 }
2363 return error_mark_node;
2364 }
2365
2366 /* From here on down, BASETYPE is the type that INSTANCE_PTR's
2367 type (if it exists) is a pointer to. */
2368
2369 if (DECL_ABSTRACT_VIRTUAL_P (function)
2370 && instance == C_C_D
2371 && DECL_CONSTRUCTOR_P (current_function_decl)
2372 && ! (flags & LOOKUP_NONVIRTUAL)
2373 && value_member (function, get_abstract_virtuals (basetype)))
2374 cp_error ("abstract virtual `%#D' called from constructor", function);
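/* Illustrative (an assumption, not from the original sources):
	struct B { virtual void f () = 0;  B () { f (); } };
   calls the abstract virtual `f' while the object is still being
   constructed, which is what the diagnostic above reports.  */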
2375
2376 if (IS_SIGNATURE (basetype) && static_call_context)
2377 {
2378 cp_error ("cannot call signature member function `%T::%D' without signature pointer/reference",
2379 basetype, name);
2380 return error_mark_node;
2381 }
2382 else if (IS_SIGNATURE (basetype))
2383 return build_signature_method_call (basetype, instance, function, parms);
2384
2385 function = DECL_MAIN_VARIANT (function);
2386 /* Declare external function if necessary. */
2387 assemble_external (function);
2388
2389 #if 0
2390 if (DECL_ARTIFICIAL (function) && ! flag_no_inline
2391 && DECL_SAVED_INSNS (function) == 0
2392 && ! TREE_ASM_WRITTEN (function))
2393 synthesize_method (function);
2394 #endif
2395
2396 fntype = TREE_TYPE (function);
2397 if (TREE_CODE (fntype) == POINTER_TYPE)
2398 fntype = TREE_TYPE (fntype);
2399 basetype = DECL_CLASS_CONTEXT (function);
2400
2401 /* If we are referencing a virtual function from an object
2402 of effectively static type, then there is no need
2403 to go through the virtual function table. */
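/* For example (illustrative only):
	Derived d;
	d.vf ();		// type of `d' is fixed: call directly
	Base *bp = &d;
	bp->vf ();		// dynamic type unknown: use the vtable  */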
2404 if (need_vtbl == maybe_needed)
2405 {
2406 int fixed_type = resolves_to_fixed_type_p (instance, 0);
2407
2408 if (all_virtual == 1
2409 && DECL_VINDEX (function)
2410 && may_be_remote (basetype))
2411 need_vtbl = needed;
2412 else if (DECL_VINDEX (function))
2413 need_vtbl = fixed_type ? unneeded : needed;
2414 else
2415 need_vtbl = not_needed;
2416 }
2417
2418 if (TREE_CODE (fntype) == METHOD_TYPE && static_call_context
2419 && !DECL_CONSTRUCTOR_P (function))
2420 {
2421 /* Let's be nice to the user for now, and give reasonable
2422 default behavior. */
2423 instance_ptr = current_class_decl;
2424 if (instance_ptr)
2425 {
2426 if (basetype != current_class_type)
2427 {
2428 tree binfo = get_binfo (basetype, current_class_type, 1);
2429 if (binfo == NULL_TREE)
2430 {
2431 error_not_base_type (function, current_class_type);
2432 return error_mark_node;
2433 }
2434 else if (basetype == error_mark_node)
2435 return error_mark_node;
2436 }
2437 }
2438 /* Only allow a static member function to call another static member
2439 function. */
2440 else if (DECL_LANG_SPECIFIC (function)
2441 && !DECL_STATIC_FUNCTION_P (function))
2442 {
2443 cp_error ("cannot call member function `%D' without object",
2444 function);
2445 return error_mark_node;
2446 }
2447 }
2448
2449 value_type = TREE_TYPE (fntype) ? TREE_TYPE (fntype) : void_type_node;
2450
2451 if (TYPE_SIZE (value_type) == 0)
2452 {
2453 if (flags & LOOKUP_COMPLAIN)
2454 incomplete_type_error (0, value_type);
2455 return error_mark_node;
2456 }
2457
2458 if (DECL_STATIC_FUNCTION_P (function))
2459 parms = convert_arguments (NULL_TREE, TYPE_ARG_TYPES (fntype),
2460 TREE_CHAIN (parms), function, LOOKUP_NORMAL);
2461 else if (need_vtbl == unneeded)
2462 {
2463 int sub_flags = DECL_CONSTRUCTOR_P (function) ? flags : LOOKUP_NORMAL;
2464 basetype = TREE_TYPE (instance);
2465 if (TYPE_METHOD_BASETYPE (TREE_TYPE (function)) != TYPE_MAIN_VARIANT (basetype)
2466 && TYPE_USES_COMPLEX_INHERITANCE (basetype))
2467 {
2468 basetype = DECL_CLASS_CONTEXT (function);
2469 instance_ptr = convert_pointer_to (basetype, instance_ptr);
2470 instance = build_indirect_ref (instance_ptr, NULL_PTR);
2471 }
2472 parms = tree_cons (NULL_TREE, instance_ptr,
2473 convert_arguments (NULL_TREE, TREE_CHAIN (TYPE_ARG_TYPES (fntype)), TREE_CHAIN (parms), function, sub_flags));
2474 }
2475 else
2476 {
2477 if ((flags & LOOKUP_NONVIRTUAL) == 0)
2478 basetype = DECL_CONTEXT (function);
2479
2480 /* First parm could be integer_zerop with casts like
2481 ((Object*)0)->Object::IsA() */
2482 if (!integer_zerop (TREE_VALUE (parms)))
2483 {
2484 /* Since we can't have inheritance with a union, doing get_binfo
2485 on it won't work. We do all the convert_pointer_to_real
2486 stuff to handle MI correctly; for unions, that's not
2487 an issue, so we must short-circuit that extra work here. */
2488 tree tmp = TREE_TYPE (TREE_TYPE (TREE_VALUE (parms)));
2489 if (tmp != NULL_TREE && TREE_CODE (tmp) == UNION_TYPE)
2490 instance_ptr = TREE_VALUE (parms);
2491 else
2492 {
2493 tree binfo = get_binfo (basetype,
2494 TREE_TYPE (TREE_TYPE (TREE_VALUE (parms))),
2495 0);
2496 instance_ptr = convert_pointer_to_real (binfo, TREE_VALUE (parms));
2497 }
2498 instance_ptr
2499 = convert_pointer_to (build_type_variant (basetype,
2500 constp, volatilep),
2501 instance_ptr);
2502
2503 if (TREE_CODE (instance_ptr) == COND_EXPR)
2504 {
2505 instance_ptr = save_expr (instance_ptr);
2506 instance = build_indirect_ref (instance_ptr, NULL_PTR);
2507 }
2508 else if (TREE_CODE (instance_ptr) == NOP_EXPR
2509 && TREE_CODE (TREE_OPERAND (instance_ptr, 0)) == ADDR_EXPR
2510 && TREE_OPERAND (TREE_OPERAND (instance_ptr, 0), 0) == instance)
2511 ;
2512 /* The call to `convert_pointer_to' may return error_mark_node. */
2513 else if (TREE_CODE (instance_ptr) == ERROR_MARK)
2514 return instance_ptr;
2515 else if (instance == NULL_TREE
2516 || TREE_CODE (instance) != INDIRECT_REF
2517 || TREE_OPERAND (instance, 0) != instance_ptr)
2518 instance = build_indirect_ref (instance_ptr, NULL_PTR);
2519 }
2520 parms = tree_cons (NULL_TREE, instance_ptr,
2521 convert_arguments (NULL_TREE, TREE_CHAIN (TYPE_ARG_TYPES (fntype)), TREE_CHAIN (parms), function, LOOKUP_NORMAL));
2522 }
2523
2524 #if 0
2525 /* Constructors do not overload method calls. */
2526 else if (TYPE_OVERLOADS_METHOD_CALL_EXPR (basetype)
2527 && name != TYPE_IDENTIFIER (basetype)
2528 && (TREE_CODE (function) != FUNCTION_DECL
2529 || strncmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function)),
2530 OPERATOR_METHOD_FORMAT,
2531 OPERATOR_METHOD_LENGTH))
2532 && (may_be_remote (basetype) || instance != C_C_D))
2533 {
2534 tree fn_as_int;
2535
2536 parms = TREE_CHAIN (parms);
2537
2538 if (!all_virtual && TREE_CODE (function) == FUNCTION_DECL)
2539 fn_as_int = build_unary_op (ADDR_EXPR, function, 0);
2540 else
2541 fn_as_int = convert (TREE_TYPE (default_conversion (function)), DECL_VINDEX (function));
2542 if (all_virtual == 1)
2543 fn_as_int = convert (integer_type_node, fn_as_int);
2544
2545 result = build_opfncall (METHOD_CALL_EXPR, LOOKUP_NORMAL, instance, fn_as_int, parms);
2546
2547 if (result == NULL_TREE)
2548 {
2549 compiler_error ("could not overload `operator->()(...)'");
2550 return error_mark_node;
2551 }
2552 else if (result == error_mark_node)
2553 return error_mark_node;
2554
2555 #if 0
2556 /* Do this if we want the result of operator->() to inherit
2557 the type of the function it is subbing for. */
2558 TREE_TYPE (result) = value_type;
2559 #endif
2560
2561 return result;
2562 }
2563 #endif
2564
2565 if (need_vtbl == needed)
2566 {
2567 function = build_vfn_ref (&TREE_VALUE (parms), instance,
2568 DECL_VINDEX (function));
2569 TREE_TYPE (function) = build_pointer_type (fntype);
2570 }
2571
2572 if (TREE_CODE (function) == FUNCTION_DECL)
2573 GNU_xref_call (current_function_decl,
2574 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function)));
2575
2576 {
2577 int is_constructor;
2578
2579 if (TREE_CODE (function) == FUNCTION_DECL)
2580 {
2581 is_constructor = DECL_CONSTRUCTOR_P (function);
2582 if (DECL_INLINE (function))
2583 function = build1 (ADDR_EXPR, build_pointer_type (fntype), function);
2584 else
2585 {
2586 assemble_external (function);
2587 TREE_USED (function) = 1;
2588 function = default_conversion (function);
2589 }
2590 }
2591 else
2592 {
2593 is_constructor = 0;
2594 function = default_conversion (function);
2595 }
2596
2597 result = build_nt (CALL_EXPR, function, parms, NULL_TREE);
2598
2599 TREE_TYPE (result) = value_type;
2600 TREE_SIDE_EFFECTS (result) = 1;
2601 TREE_HAS_CONSTRUCTOR (result) = is_constructor;
2602 return result;
2603 }
2604 }
2605
2606 /* Similar to `build_method_call', but for overloaded non-member functions.
2607 The name of the function to call comes through FNNAME; which
2608 overloaded instance is selected depends on PARMS.
2609
2610 Note that this function must handle simple `C' promotions,
2611 as well as variable numbers of arguments (...), and
2612 default arguments to boot.
2613
2614 If the overloading is successful, we return a tree node which
2615 contains the call to the function.
2616
2617 If overloading produces candidates which are probable, but not definite,
2618 we hold these candidates. If FINAL_CP is non-zero, then we are free
2619 to assume that final_cp points to enough storage for all candidates that
2620 this function might generate. The `harshness' array is preallocated for
2621 the first candidate, but not for subsequent ones.
2622
2623 Note that the DECL_RTL of FUNCTION must be made to agree with this
2624 function's new name. */
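/* For instance (illustrative, not from the original sources): given

	void f (int);
	void f (double);

   the call `f ('a')' is resolved here by ranking the integral
   promotion char -> int against the standard conversion
   char -> double, and `f (int)' wins.  */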
2625
2626 tree
2627 build_overload_call_real (fnname, parms, flags, final_cp, buildxxx)
2628 tree fnname, parms;
2629 int flags;
2630 struct candidate *final_cp;
2631 int buildxxx;
2632 {
2633 /* must check for overloading here */
2634 tree overload_name, functions, function, parm;
2635 tree parmtypes = NULL_TREE, last = NULL_TREE;
2636 register tree outer;
2637 int length;
2638 int parmlength = list_length (parms);
2639
2640 struct candidate *candidates, *cp;
2641
2642 if (final_cp)
2643 {
2644 final_cp[0].h.code = 0;
2645 final_cp[0].h.distance = 0;
2646 final_cp[0].function = 0;
2647 /* end marker. */
2648 final_cp[1].h.code = EVIL_CODE;
2649 }
2650
2651 for (parm = parms; parm; parm = TREE_CHAIN (parm))
2652 {
2653 register tree t = TREE_TYPE (TREE_VALUE (parm));
2654
2655 if (t == error_mark_node)
2656 {
2657 if (final_cp)
2658 final_cp->h.code = EVIL_CODE;
2659 return error_mark_node;
2660 }
2661 if (TREE_CODE (t) == OFFSET_TYPE)
2662 #if 0
2663 /* This breaks reference-to-array parameters. */
2664 || TREE_CODE (t) == ARRAY_TYPE
2665 #endif
2666 {
2667 /* Perform the conversion from ARRAY_TYPE to POINTER_TYPE in place.
2668 Also convert OFFSET_TYPE entities to their normal selves.
2669 This eliminates needless calls to `compute_conversion_costs'. */
2670 TREE_VALUE (parm) = default_conversion (TREE_VALUE (parm));
2671 t = TREE_TYPE (TREE_VALUE (parm));
2672 }
2673 last = build_tree_list (NULL_TREE, t);
2674 parmtypes = chainon (parmtypes, last);
2675 }
2676 if (last)
2677 TREE_CHAIN (last) = void_list_node;
2678 else
2679 parmtypes = void_list_node;
2680
2681 if (is_overloaded_fn (fnname))
2682 {
2683 functions = fnname;
2684 if (TREE_CODE (fnname) == TREE_LIST)
2685 fnname = TREE_PURPOSE (functions);
2686 else if (TREE_CODE (fnname) == FUNCTION_DECL)
2687 fnname = DECL_NAME (functions);
2688 }
2689 else
2690 functions = lookup_name_nonclass (fnname);
2691
2692 if (functions == NULL_TREE)
2693 {
2694 if (flags & LOOKUP_SPECULATIVELY)
2695 return NULL_TREE;
2696 if (flags & LOOKUP_COMPLAIN)
2697 error ("only member functions apply");
2698 if (final_cp)
2699 final_cp->h.code = EVIL_CODE;
2700 return error_mark_node;
2701 }
2702
2703 if (TREE_CODE (functions) == FUNCTION_DECL && ! IDENTIFIER_OPNAME_P (fnname))
2704 {
2705 functions = DECL_MAIN_VARIANT (functions);
2706 if (final_cp)
2707 {
2708 /* We are just curious whether this is a viable alternative or
2709 not. */
2710 compute_conversion_costs (functions, parms, final_cp, parmlength);
2711 return functions;
2712 }
2713 else
2714 return build_function_call_real (functions, parms, 1, flags);
2715 }
2716
2717 if (TREE_CODE (functions) == TREE_LIST
2718 && TREE_VALUE (functions) == NULL_TREE)
2719 {
2720 if (flags & LOOKUP_SPECULATIVELY)
2721 return NULL_TREE;
2722
2723 if (flags & LOOKUP_COMPLAIN)
2724 cp_error ("function `%D' declared overloaded, but no instances of that function declared",
2725 TREE_PURPOSE (functions));
2726 if (final_cp)
2727 final_cp->h.code = EVIL_CODE;
2728 return error_mark_node;
2729 }
2730
2731 length = count_functions (functions);
2732
2733 if (final_cp)
2734 candidates = final_cp;
2735 else
2736 {
2737 candidates
2738 = (struct candidate *)alloca ((length+1) * sizeof (struct candidate));
2739 bzero ((char *) candidates, (length + 1) * sizeof (struct candidate));
2740 }
2741
2742 cp = candidates;
2743
2744 my_friendly_assert (is_overloaded_fn (functions), 169);
2745
2746 functions = get_first_fn (functions);
2747
2748 /* OUTER is the list of FUNCTION_DECLS, in a TREE_LIST. */
2749 for (outer = functions; outer; outer = DECL_CHAIN (outer))
2750 {
2751 int template_cost = 0;
2752 function = outer;
2753 if (TREE_CODE (function) != FUNCTION_DECL
2754 && ! (TREE_CODE (function) == TEMPLATE_DECL
2755 && ! DECL_TEMPLATE_IS_CLASS (function)
2756 && TREE_CODE (DECL_TEMPLATE_RESULT (function)) == FUNCTION_DECL))
2757 {
2758 enum tree_code code = TREE_CODE (function);
2759 if (code == TEMPLATE_DECL)
2760 code = TREE_CODE (DECL_TEMPLATE_RESULT (function));
2761 if (code == CONST_DECL)
2762 cp_error_at
2763 ("enumeral value `%D' conflicts with function of same name",
2764 function);
2765 else if (code == VAR_DECL)
2766 {
2767 if (TREE_STATIC (function))
2768 cp_error_at
2769 ("variable `%D' conflicts with function of same name",
2770 function);
2771 else
2772 cp_error_at
2773 ("constant field `%D' conflicts with function of same name",
2774 function);
2775 }
2776 else if (code == TYPE_DECL)
2777 continue;
2778 else
2779 my_friendly_abort (2);
2780 error ("at this point in file");
2781 continue;
2782 }
2783 if (TREE_CODE (function) == TEMPLATE_DECL)
2784 {
2785 int ntparms = TREE_VEC_LENGTH (DECL_TEMPLATE_PARMS (function));
2786 tree *targs = (tree *) alloca (sizeof (tree) * ntparms);
2787 int i;
2788
2789 i = type_unification (DECL_TEMPLATE_PARMS (function), targs,
2790 TYPE_ARG_TYPES (TREE_TYPE (function)),
2791 parms, &template_cost, 0);
2792 if (i == 0)
2793 function = instantiate_template (function, targs);
2794 }
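/* Illustrative (an assumption, not from the original sources): for
	template <class T> void g (T);
   the call `g (7)' unifies T with `int' above and instantiates
   `g (int)', which then competes with any non-template `g' found in
   the overload set; the accumulated `template_cost' is folded into
   the candidate's int_penalty below, slightly penalizing the
   instantiation relative to an otherwise-equal non-template match.  */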
2795
2796 if (TREE_CODE (function) == TEMPLATE_DECL)
2797 {
2798 /* Unconverted template -- failed match. */
2799 cp->function = function;
2800 cp->u.bad_arg = -4;
2801 cp->h.code = EVIL_CODE;
2802 }
2803 else
2804 {
2805 struct candidate *cp2;
2806
2807 /* Check that this decl is not the same as a function that's in
2808 the list due to some template instantiation. */
2809 cp2 = candidates;
2810 while (cp2 != cp)
2811 if (cp2->function == function)
2812 break;
2813 else
2814 cp2 += 1;
2815 if (cp2->function == function)
2816 continue;
2817
2818 function = DECL_MAIN_VARIANT (function);
2819
2820 /* Can't use alloca here, since result might be
2821 passed to the calling function. */
2822 cp->h_len = parmlength;
2823 cp->harshness = (struct harshness_code *)
2824 oballoc ((parmlength + 1) * sizeof (struct harshness_code));
2825
2826 compute_conversion_costs (function, parms, cp, parmlength);
2827
2828 /* Make sure this is clear as well. */
2829 cp->h.int_penalty += template_cost;
2830
2831 if ((cp[0].h.code & EVIL_CODE) == 0)
2832 {
2833 cp[1].h.code = EVIL_CODE;
2834 cp++;
2835 }
2836 }
2837 }
2838
2839 if (cp - candidates)
2840 {
2841 tree rval = error_mark_node;
2842
2843 /* Leave marker. */
2844 cp[0].h.code = EVIL_CODE;
2845 if (cp - candidates > 1)
2846 {
2847 struct candidate *best_cp
2848 = ideal_candidate (NULL_TREE, candidates,
2849 cp - candidates, parms, parmlength);
2850 if (best_cp == (struct candidate *)0)
2851 {
2852 if (flags & LOOKUP_COMPLAIN)
2853 {
2854 cp_error ("call of overloaded `%D' is ambiguous", fnname);
2855 print_n_candidates (candidates, cp - candidates);
2856 }
2857 return error_mark_node;
2858 }
2859 else
2860 rval = best_cp->function;
2861 }
2862 else
2863 {
2864 cp -= 1;
2865 if (cp->h.code & EVIL_CODE)
2866 {
2867 if (flags & LOOKUP_COMPLAIN)
2868 error ("type conversion ambiguous");
2869 }
2870 else
2871 rval = cp->function;
2872 }
2873
2874 if (final_cp)
2875 return rval;
2876
2877 return buildxxx ? build_function_call_real (rval, parms, 0, flags)
2878 : build_function_call_real (rval, parms, 1, flags);
2879 }
2880
2881 if (flags & LOOKUP_SPECULATIVELY)
2882 return NULL_TREE;
2883
2884 if (flags & LOOKUP_COMPLAIN)
2885 report_type_mismatch (cp, parms, "function",
2886 decl_as_string (cp->function, 1));
2887
2888 return error_mark_node;
2889 }
2890
2891 tree
2892 build_overload_call (fnname, parms, flags, final_cp)
2893 tree fnname, parms;
2894 int flags;
2895 struct candidate *final_cp;
2896 {
2897 return build_overload_call_real (fnname, parms, flags, final_cp, 0);
2898 }
2899
2900 tree
2901 build_overload_call_maybe (fnname, parms, flags, final_cp)
2902 tree fnname, parms;
2903 int flags;
2904 struct candidate *final_cp;
2905 {
2906 return build_overload_call_real (fnname, parms, flags, final_cp, 1);
2907 }