gcc/cp/call.c
1 /* Functions related to invoking methods and overloaded functions.
2 Copyright (C) 1987, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.
3 Contributed by Michael Tiemann (tiemann@cygnus.com) and
4 hacked by Brendan Kehoe (brendan@cygnus.com).
5
6 This file is part of GNU CC.
7
8 GNU CC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 2, or (at your option)
11 any later version.
12
13 GNU CC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GNU CC; see the file COPYING. If not, write to
20 the Free Software Foundation, 59 Temple Place - Suite 330,
21 Boston, MA 02111-1307, USA. */
22
23
24 /* High-level class interface. */
25
26 #include "config.h"
27 #include "tree.h"
28 #include <stdio.h>
29 #include "cp-tree.h"
30 #include "class.h"
31 #include "output.h"
32 #include "flags.h"
33
34 #include "obstack.h"
35 #define obstack_chunk_alloc xmalloc
36 #define obstack_chunk_free free
37
38 extern void sorry ();
39
40 extern int inhibit_warnings;
41 extern int flag_assume_nonnull_objects;
42 extern tree ctor_label, dtor_label;
43
44 /* From typeck.c: */
45 extern tree unary_complex_lvalue ();
46
47 /* Compute the ease with which a conversion can be performed
48    between an expected type and the given type. */
49 static struct harshness_code convert_harshness ();
50
51 #define EVIL_RETURN(ARG) ((ARG).code = EVIL_CODE, (ARG))
52 #define STD_RETURN(ARG) ((ARG).code = STD_CODE, (ARG))
53 #define QUAL_RETURN(ARG) ((ARG).code = QUAL_CODE, (ARG))
54 #define TRIVIAL_RETURN(ARG) ((ARG).code = TRIVIAL_CODE, (ARG))
55 #define ZERO_RETURN(ARG) ((ARG).code = 0, (ARG))
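
/* Illustrative note: these RETURN macros rely on the C comma operator, so
   that, for instance,

       return EVIL_RETURN (h);

   expands to `return ((h).code = EVIL_CODE, (h));' -- the harshness_code
   struct is tagged and handed back in a single expression.  */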
56
57 /* Ordering function for overload resolution. Compare two candidates
58 by gross quality. */
59 int
60 rank_for_overload (x, y)
61 struct candidate *x, *y;
62 {
63 if (y->h.code & (EVIL_CODE|ELLIPSIS_CODE|USER_CODE))
64 return y->h.code - x->h.code;
65 if (x->h.code & (EVIL_CODE|ELLIPSIS_CODE|USER_CODE))
66 return -1;
67
68 /* This is set by compute_conversion_costs, for calling a non-const
69 member function from a const member function. */
70 if ((y->harshness[0].code & CONST_CODE) ^ (x->harshness[0].code & CONST_CODE))
71 return y->harshness[0].code - x->harshness[0].code;
72
73 if (y->h.code & STD_CODE)
74 {
75 if (x->h.code & STD_CODE)
76 return y->h.distance - x->h.distance;
77 return 1;
78 }
79 if (x->h.code & STD_CODE)
80 return -1;
81
82 return y->h.code - x->h.code;
83 }
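
/* Illustrative note: rank_for_overload is a qsort comparison function;
   ideal_candidate below invokes it as

       qsort (candidates, n_candidates, sizeof (struct candidate),
              rank_for_overload);

   and then treats the last element of the sorted array (cp[-1]) as the
   best-ranked candidate.  */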
84
85 /* Compare two candidates, argument by argument. */
86 int
87 rank_for_ideal (x, y)
88 struct candidate *x, *y;
89 {
90 int i;
91
92 if (x->h_len != y->h_len)
93 abort ();
94
95 for (i = 0; i < x->h_len; i++)
96 {
97 if (y->harshness[i].code - x->harshness[i].code)
98 return y->harshness[i].code - x->harshness[i].code;
99 if ((y->harshness[i].code & STD_CODE)
100 && (y->harshness[i].distance - x->harshness[i].distance))
101 return y->harshness[i].distance - x->harshness[i].distance;
102
103 /* They're both the same code. Now see if we're dealing with an
104 integral promotion that needs a finer grain of accuracy. */
105 if (y->harshness[0].code & PROMO_CODE
106 && (y->harshness[i].int_penalty ^ x->harshness[i].int_penalty))
107 return y->harshness[i].int_penalty - x->harshness[i].int_penalty;
108 }
109 return 0;
110 }
111
112 /* TYPE is the type we wish to convert to.  PARMTYPE is the type of the
113    parameter we have to work with; PARM, if non-null, is the parameter
114    expression itself.  We use a somewhat arbitrary cost function to measure this conversion. */
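/* Illustrative sketch of the cost scale, as implemented below: an exact
   type match yields a zero code; converting, say, `short' to `int' is
   tagged PROMO_CODE; `int' to `long' or `double' to `int' is tagged
   STD_CODE; an impossible conversion comes back with EVIL_CODE set.  */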
115 static struct harshness_code
116 convert_harshness (type, parmtype, parm)
117 register tree type, parmtype;
118 tree parm;
119 {
120 struct harshness_code h;
121 register enum tree_code codel;
122 register enum tree_code coder;
123 int lvalue;
124
125 h.code = 0;
126 h.distance = 0;
127 h.int_penalty = 0;
128
129 #ifdef GATHER_STATISTICS
130 n_convert_harshness++;
131 #endif
132
133 if (TREE_CODE (parmtype) == REFERENCE_TYPE)
134 {
135 if (parm)
136 parm = convert_from_reference (parm);
137 parmtype = TREE_TYPE (parmtype);
138 lvalue = 1;
139 }
140 else if (parm)
141 lvalue = lvalue_p (parm);
142 else
143 lvalue = 0;
144
145 if (TYPE_PTRMEMFUNC_P (type))
146 type = TYPE_PTRMEMFUNC_FN_TYPE (type);
147 if (TYPE_PTRMEMFUNC_P (parmtype))
148 parmtype = TYPE_PTRMEMFUNC_FN_TYPE (parmtype);
149
150 codel = TREE_CODE (type);
151 coder = TREE_CODE (parmtype);
152
153 if (TYPE_MAIN_VARIANT (parmtype) == TYPE_MAIN_VARIANT (type))
154 return ZERO_RETURN (h);
155
156 if (coder == ERROR_MARK)
157 return EVIL_RETURN (h);
158
159 if (codel == REFERENCE_TYPE)
160 {
161 tree ttl, ttr;
162 int constp = parm ? TREE_READONLY (parm) : TYPE_READONLY (parmtype);
163 int volatilep = (parm ? TREE_THIS_VOLATILE (parm)
164 : TYPE_VOLATILE (parmtype));
165 register tree intype = TYPE_MAIN_VARIANT (parmtype);
166 register enum tree_code form = TREE_CODE (intype);
167 int penalty = 0;
168
169 ttl = TREE_TYPE (type);
170
171 /* Only allow const reference binding if we were given a parm to deal
172 with, since it isn't really a conversion. This is a hack to
173 prevent build_type_conversion from finding this conversion, but
174 still allow overloading to find it. */
175 if (! lvalue && ! (parm && TYPE_READONLY (ttl)))
176 return EVIL_RETURN (h);
177
178 if ((TYPE_READONLY (ttl) < constp)
179 || (TYPE_VOLATILE (ttl) < volatilep))
180 return EVIL_RETURN (h);
181
182 /* When passing a non-const argument into a const reference, dig it a
183 little, so a non-const reference is preferred over this one. */
184 penalty = ((TYPE_READONLY (ttl) > constp)
185 + (TYPE_VOLATILE (ttl) > volatilep));
186
187 ttl = TYPE_MAIN_VARIANT (ttl);
188
189 if (form == OFFSET_TYPE)
190 {
191 intype = TREE_TYPE (intype);
192 form = TREE_CODE (intype);
193 }
194
195 ttr = intype;
196
197 if (TREE_CODE (ttl) == ARRAY_TYPE && TREE_CODE (ttr) == ARRAY_TYPE)
198 {
199 if (comptypes (ttl, ttr, 1))
200 return ZERO_RETURN (h);
201 return EVIL_RETURN (h);
202 }
203
204 h = convert_harshness (ttl, ttr, NULL_TREE);
205 if (penalty && h.code == 0)
206 {
207 h.code = QUAL_CODE;
208 h.int_penalty = penalty;
209 }
210 return h;
211 }
212
213 if (codel == POINTER_TYPE && fntype_p (parmtype))
214 {
215 tree p1, p2;
216 struct harshness_code h1, h2;
217
218 /* Get to the METHOD_TYPE or FUNCTION_TYPE that this might be. */
219 type = TREE_TYPE (type);
220
221 if (coder == POINTER_TYPE)
222 {
223 parmtype = TREE_TYPE (parmtype);
224 coder = TREE_CODE (parmtype);
225 }
226
227 if (coder != TREE_CODE (type))
228 return EVIL_RETURN (h);
229
230 if (type != parmtype && coder == METHOD_TYPE)
231 {
232 tree ttl = TYPE_METHOD_BASETYPE (type);
233 tree ttr = TYPE_METHOD_BASETYPE (parmtype);
234
235 int b_or_d = get_base_distance (ttr, ttl, 0, 0);
236 if (b_or_d < 0)
237 {
238 b_or_d = get_base_distance (ttl, ttr, 0, 0);
239 if (b_or_d < 0)
240 return EVIL_RETURN (h);
241 h.distance = -b_or_d;
242 }
243 else
244 h.distance = b_or_d;
245 h.code = STD_CODE;
246
247 type = build_function_type
248 (TREE_TYPE (type), TREE_CHAIN (TYPE_ARG_TYPES (type)));
249 parmtype = build_function_type
250 (TREE_TYPE (parmtype), TREE_CHAIN (TYPE_ARG_TYPES (parmtype)));
251 }
252
253 /* We allow the default conversion between function type
254 and pointer-to-function type for free. */
255 if (comptypes (type, parmtype, 1))
256 return h;
257
258 if (pedantic)
259 return EVIL_RETURN (h);
260
261 /* Compare return types. */
262 p1 = TREE_TYPE (type);
263 p2 = TREE_TYPE (parmtype);
264 h2 = convert_harshness (p1, p2, NULL_TREE);
265 if (h2.code & EVIL_CODE)
266 return h2;
267
268 h1.code = TRIVIAL_CODE;
269 h1.distance = 0;
270
271 if (h2.distance != 0)
272 {
273 tree binfo;
274
275 /* This only works for pointers. */
276 if (TREE_CODE (p1) != POINTER_TYPE
277 && TREE_CODE (p1) != REFERENCE_TYPE)
278 return EVIL_RETURN (h);
279
280 p1 = TREE_TYPE (p1);
281 p2 = TREE_TYPE (p2);
282 /* Don't die if we happen to be dealing with void*. */
283 if (!IS_AGGR_TYPE (p1) || !IS_AGGR_TYPE (p2))
284 return EVIL_RETURN (h);
285 if (h2.distance < 0)
286 binfo = get_binfo (p2, p1, 0);
287 else
288 binfo = get_binfo (p1, p2, 0);
289
290 if (! BINFO_OFFSET_ZEROP (binfo))
291 {
292 #if 0
293 static int explained = 0;
294 if (h2.distance < 0)
295 message_2_types (sorry, "cannot cast `%s' to `%s' at function call site", p2, p1);
296 else
297 message_2_types (sorry, "cannot cast `%s' to `%s' at function call site", p1, p2);
298
299 if (! explained++)
300 sorry ("(because pointer values change during conversion)");
301 #endif
302 return EVIL_RETURN (h);
303 }
304 }
305
306 h1.code |= h2.code;
307 if (h2.distance > h1.distance)
308 h1.distance = h2.distance;
309
310 p1 = TYPE_ARG_TYPES (type);
311 p2 = TYPE_ARG_TYPES (parmtype);
312 while (p1 && TREE_VALUE (p1) != void_type_node
313 && p2 && TREE_VALUE (p2) != void_type_node)
314 {
315 h2 = convert_harshness (TREE_VALUE (p1), TREE_VALUE (p2),
316 NULL_TREE);
317 if (h2.code & EVIL_CODE)
318 return h2;
319
320 if (h2.distance)
321 {
322 /* This only works for pointers and references. */
323 if (TREE_CODE (TREE_VALUE (p1)) != POINTER_TYPE
324 && TREE_CODE (TREE_VALUE (p1)) != REFERENCE_TYPE)
325 return EVIL_RETURN (h);
326 h2.distance = - h2.distance;
327 }
328
329 h1.code |= h2.code;
330 if (h2.distance > h1.distance)
331 h1.distance = h2.distance;
332 p1 = TREE_CHAIN (p1);
333 p2 = TREE_CHAIN (p2);
334 }
335 if (p1 == p2)
336 return h1;
337 if (p2)
338 {
339 if (p1)
340 return EVIL_RETURN (h);
341 h1.code |= ELLIPSIS_CODE;
342 return h1;
343 }
344 if (p1)
345 {
346 if (TREE_PURPOSE (p1) == NULL_TREE)
347 h1.code |= EVIL_CODE;
348 return h1;
349 }
350 }
351 else if (codel == POINTER_TYPE && coder == OFFSET_TYPE)
352 {
353 tree ttl, ttr;
354
355 /* Get to the OFFSET_TYPE that this might be. */
356 type = TREE_TYPE (type);
357
358 if (coder != TREE_CODE (type))
359 return EVIL_RETURN (h);
360
361 ttl = TYPE_OFFSET_BASETYPE (type);
362 ttr = TYPE_OFFSET_BASETYPE (parmtype);
363
364 if (ttl == ttr)
365 h.code = 0;
366 else
367 {
368 int b_or_d = get_base_distance (ttr, ttl, 0, 0);
369 if (b_or_d < 0)
370 {
371 b_or_d = get_base_distance (ttl, ttr, 0, 0);
372 if (b_or_d < 0)
373 return EVIL_RETURN (h);
374 h.distance = -b_or_d;
375 }
376 else
377 h.distance = b_or_d;
378 h.code = STD_CODE;
379 }
380
381 /* Now test the OFFSET_TYPE's target compatibility. */
382 type = TREE_TYPE (type);
383 parmtype = TREE_TYPE (parmtype);
384 }
385
386 if (coder == UNKNOWN_TYPE)
387 {
388 if (codel == FUNCTION_TYPE
389 || codel == METHOD_TYPE
390 || (codel == POINTER_TYPE
391 && (TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE
392 || TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)))
393 return TRIVIAL_RETURN (h);
394 return EVIL_RETURN (h);
395 }
396
397 if (coder == VOID_TYPE)
398 return EVIL_RETURN (h);
399
400 if (codel == BOOLEAN_TYPE)
401 {
402 if (INTEGRAL_CODE_P (coder) || coder == REAL_TYPE)
403 return STD_RETURN (h);
404 else if (coder == POINTER_TYPE || coder == OFFSET_TYPE)
405 {
406 /* Make this worse than any conversion to another pointer.
407 FIXME this is how I think the language should work, but it may not
408 end up being how the language is standardized (jason 1/30/95). */
409 h.distance = 32767;
410 return STD_RETURN (h);
411 }
412 return EVIL_RETURN (h);
413 }
414
415 if (INTEGRAL_CODE_P (codel))
416 {
417 	  /* Control equivalence of ints and enums. */
418
419 if (codel == ENUMERAL_TYPE
420 && flag_int_enum_equivalence == 0)
421 {
422 /* Enums can be converted to ints, but not vice-versa. */
423 if (coder != ENUMERAL_TYPE
424 || TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (parmtype))
425 return EVIL_RETURN (h);
426 }
427
428 /* else enums and ints (almost) freely interconvert. */
429
430 if (INTEGRAL_CODE_P (coder))
431 {
432 if (TYPE_MAIN_VARIANT (type)
433 == TYPE_MAIN_VARIANT (type_promotes_to (parmtype)))
434 {
435 h.code = PROMO_CODE;
436 #if 0 /* What purpose does this serve? -jason */
437 /* A char, short, wchar_t, etc., should promote to an int if
438 it can handle it, otherwise to an unsigned. So we'll make
439 an unsigned. */
440 if (type != integer_type_node)
441 h.int_penalty = 1;
442 #endif
443 }
444 else
445 h.code = STD_CODE;
446
447 return h;
448 }
449 else if (coder == REAL_TYPE)
450 {
451 h.code = STD_CODE;
452 h.distance = 0;
453 return h;
454 }
455 }
456
457 if (codel == REAL_TYPE)
458 {
459 if (coder == REAL_TYPE)
460 {
461 if (TYPE_MAIN_VARIANT (type)
462 == TYPE_MAIN_VARIANT (type_promotes_to (parmtype)))
463 h.code = PROMO_CODE;
464 else
465 h.code = STD_CODE;
466
467 return h;
468 }
469 else if (INTEGRAL_CODE_P (coder))
470 {
471 h.code = STD_CODE;
472 h.distance = 0;
473 return h;
474 }
475 }
476
477 /* Convert arrays which have not previously been converted. */
478 #if 0
479 if (codel == ARRAY_TYPE)
480 codel = POINTER_TYPE;
481 #endif
482 if (coder == ARRAY_TYPE)
483 {
484 coder = POINTER_TYPE;
485 if (parm)
486 {
487 parm = decay_conversion (parm);
488 parmtype = TREE_TYPE (parm);
489 }
490 else
491 parmtype = build_pointer_type (TREE_TYPE (parmtype));
492 }
493
494 /* Conversions among pointers */
495 if (codel == POINTER_TYPE && coder == POINTER_TYPE)
496 {
497 register tree ttl = TYPE_MAIN_VARIANT (TREE_TYPE (type));
498 register tree ttr = TYPE_MAIN_VARIANT (TREE_TYPE (parmtype));
499 int penalty = 4 * (ttl != ttr);
500
501 /* Anything converts to void *. Since this may be `const void *'
502 (etc.) use VOID_TYPE instead of void_type_node. Otherwise, the
503 targets must be the same, except that we do allow (at some cost)
504 conversion between signed and unsigned pointer types. */
505
506 if ((TREE_CODE (ttl) == METHOD_TYPE
507 || TREE_CODE (ttl) == FUNCTION_TYPE)
508 && TREE_CODE (ttl) == TREE_CODE (ttr))
509 {
510 if (comptypes (ttl, ttr, -1))
511 {
512 h.code = penalty ? STD_CODE : 0;
513 h.distance = 0;
514 }
515 else
516 h.code = EVIL_CODE;
517 return h;
518 }
519
520 #if 1
521 if (TREE_CODE (ttl) != VOID_TYPE
522 && (TREE_CODE (ttr) != VOID_TYPE || !parm || !integer_zerop (parm)))
523 {
524 if (TREE_UNSIGNED (ttl) != TREE_UNSIGNED (ttr))
525 {
526 ttl = unsigned_type (ttl);
527 ttr = unsigned_type (ttr);
528 penalty = 10;
529 }
530 if (comp_target_types (type, parmtype, 1) <= 0)
531 return EVIL_RETURN (h);
532 }
533 #else
534 if (!(TREE_CODE (ttl) == VOID_TYPE
535 || TREE_CODE (ttr) == VOID_TYPE
536 || (TREE_UNSIGNED (ttl) ^ TREE_UNSIGNED (ttr)
537 && (ttl = unsigned_type (ttl),
538 ttr = unsigned_type (ttr),
539 penalty = 10, 0))
540 || (comp_target_types (ttl, ttr, 0) > 0)))
541 return EVIL_RETURN (h);
542 #endif
543
544 if (penalty == 10 || ttr == ttl)
545 {
546 tree tmp1 = TREE_TYPE (type), tmp2 = TREE_TYPE (parmtype);
547
548 /* If one was unsigned but the other wasn't, then we need to
549 do a standard conversion from T to unsigned T. */
550 if (penalty == 10)
551 h.code = PROMO_CODE; /* was STD_CODE */
552 else
553 h.code = 0;
554
555 /* Note conversion from `T*' to `const T*',
556 or `T*' to `volatile T*'. */
557 if (ttl == ttr
558 && ((TYPE_READONLY (tmp1) != TREE_READONLY (tmp2))
559 || (TYPE_VOLATILE (tmp1) != TYPE_VOLATILE (tmp2))))
560 h.code |= QUAL_CODE;
561
562 h.distance = 0;
563 return h;
564 }
565
566
567 if (TREE_CODE (ttl) == RECORD_TYPE && TREE_CODE (ttr) == RECORD_TYPE)
568 {
569 int b_or_d = get_base_distance (ttl, ttr, 0, 0);
570 if (b_or_d < 0)
571 {
572 b_or_d = get_base_distance (ttr, ttl, 0, 0);
573 if (b_or_d < 0)
574 return EVIL_RETURN (h);
575 h.distance = -b_or_d;
576 }
577 else
578 h.distance = b_or_d;
579 h.code = STD_CODE;
580 return h;
581 }
582
583 /* If converting from a `class*' to a `void*', make it
584 less favorable than any inheritance relationship. */
585 if (TREE_CODE (ttl) == VOID_TYPE && IS_AGGR_TYPE (ttr))
586 {
587 h.code = STD_CODE;
588 h.distance = CLASSTYPE_MAX_DEPTH (ttr)+1;
589 return h;
590 }
591
592 h.code = penalty ? STD_CODE : PROMO_CODE;
593 /* Catch things like `const char *' -> `const void *'
594 vs `const char *' -> `void *'. */
595 if (ttl != ttr)
596 {
597 tree tmp1 = TREE_TYPE (type), tmp2 = TREE_TYPE (parmtype);
598 if ((TYPE_READONLY (tmp1) != TREE_READONLY (tmp2))
599 || (TYPE_VOLATILE (tmp1) != TYPE_VOLATILE (tmp2)))
600 h.code |= QUAL_CODE;
601 }
602 return h;
603 }
604
605 if (codel == POINTER_TYPE && coder == INTEGER_TYPE)
606 {
607 /* This is not a bad match, but don't let it beat
608 integer-enum combinations. */
609 if (parm && integer_zerop (parm))
610 {
611 h.code = STD_CODE;
612 h.distance = 0;
613 return h;
614 }
615 }
616
617 /* C++: Since the `this' parameter of a signature member function
618 is represented as a signature pointer to handle default implementations
619 correctly, we can have the case that `type' is a signature pointer
620 while `parmtype' is a pointer to a signature table. We don't really
621 do any conversions in this case, so just return 0. */
622
623 if (codel == RECORD_TYPE && coder == POINTER_TYPE
624 && IS_SIGNATURE_POINTER (type) && IS_SIGNATURE (TREE_TYPE (parmtype)))
625 return ZERO_RETURN (h);
626
627 if (codel == RECORD_TYPE && coder == RECORD_TYPE)
628 {
629 int b_or_d = get_base_distance (type, parmtype, 0, 0);
630 if (b_or_d < 0)
631 {
632 b_or_d = get_base_distance (parmtype, type, 0, 0);
633 if (b_or_d < 0)
634 return EVIL_RETURN (h);
635 h.distance = -b_or_d;
636 }
637 else
638 h.distance = b_or_d;
639 h.code = STD_CODE;
640 return h;
641 }
642 return EVIL_RETURN (h);
643 }
644
645 /* A clone of build_type_conversion for checking user-defined conversions in
646 overload resolution. */
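/* Illustrative reading of the return convention below: 0 if PARMTYPE
   already declares an `operator TYPE' conversion, the harshness code of
   the single applicable conversion otherwise, EVIL_CODE if more than one
   conversion would apply, and -1 if none does.  */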
647
648 int
649 user_harshness (type, parmtype)
650 register tree type, parmtype;
651 {
652 tree conv;
653 tree winner = NULL_TREE;
654 int code;
655
656 {
657 tree typename = build_typename_overload (type);
658 if (lookup_fnfields (TYPE_BINFO (parmtype), typename, 0))
659 return 0;
660 }
661
662 for (conv = lookup_conversions (parmtype); conv; conv = TREE_CHAIN (conv))
663 {
664 struct harshness_code tmp;
665
666 if (winner && TREE_PURPOSE (winner) == TREE_PURPOSE (conv))
667 continue;
668
669 if (tmp = convert_harshness (type, TREE_VALUE (conv), NULL_TREE),
670 (tmp.code < USER_CODE) && (tmp.distance >= 0))
671 {
672 if (winner)
673 return EVIL_CODE;
674 else
675 {
676 winner = conv;
677 code = tmp.code;
678 }
679 }
680 }
681
682 if (winner)
683 return code;
684
685 return -1;
686 }
687
688 int
689 can_convert (to, from)
690 tree to, from;
691 {
692 struct harshness_code h;
693 h = convert_harshness (to, from, NULL_TREE);
694 return (h.code < USER_CODE) && (h.distance >= 0);
695 }
696
697 int
698 can_convert_arg (to, from, arg)
699 tree to, from, arg;
700 {
701 struct harshness_code h;
702 h = convert_harshness (to, from, arg);
703 return (h.code < USER_CODE) && (h.distance >= 0);
704 }
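
/* Illustrative note on the two predicates above: the `h.code < USER_CODE'
   test passes only when no bit as costly as a user-defined conversion is
   set, and `h.distance >= 0' rejects matches that would have to walk the
   class lattice in the wrong (base-to-derived) direction.  */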
705
706 #ifdef DEBUG_MATCHING
707 static char *
708 print_harshness (h)
709 struct harshness_code *h;
710 {
711 static char buf[1024];
712 char tmp[1024];
713
714 bzero (buf, 1024 * sizeof (char));
715 strcat (buf, "codes=[");
716 if (h->code & EVIL_CODE)
717 strcat (buf, "EVIL");
718 if (h->code & CONST_CODE)
719 strcat (buf, " CONST");
720 if (h->code & ELLIPSIS_CODE)
721 strcat (buf, " ELLIPSIS");
722 if (h->code & USER_CODE)
723 strcat (buf, " USER");
724 if (h->code & STD_CODE)
725 strcat (buf, " STD");
726 if (h->code & PROMO_CODE)
727 strcat (buf, " PROMO");
728 if (h->code & QUAL_CODE)
729 strcat (buf, " QUAL");
730 if (h->code & TRIVIAL_CODE)
731 strcat (buf, " TRIVIAL");
732 if (buf[0] == '\0')
733 strcat (buf, "0");
734
735 sprintf (tmp, "] distance=%d int_penalty=%d", h->distance, h->int_penalty);
736
737 strcat (buf, tmp);
738
739 return buf;
740 }
741 #endif
742
743 /* Algorithm: For each argument, calculate how difficult it is to
744 make FUNCTION accept that argument. If we can easily tell that
745 FUNCTION won't be acceptable to one of the arguments, then we
746 don't need to compute the ease of converting the other arguments,
747 since it will never show up in the intersection of all arguments'
748 favorite functions.
749
750 Conversions between builtin and user-defined types are allowed, but
751 no function involving such a conversion is preferred to one which
752 does not require such a conversion. Furthermore, such conversions
753 must be unique. */
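/* Illustrative example of the above: for a call `f (1.0)' considered
   against `f (double)' and `f (int)', the double candidate's lone argument
   gets a zero harshness code (exact match) while the int candidate's gets
   STD_CODE; rank_for_overload and ideal_candidate then prefer the exact
   match.  */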
754
755 void
756 compute_conversion_costs (function, tta_in, cp, arglen)
757 tree function;
758 tree tta_in;
759 struct candidate *cp;
760 int arglen;
761 {
762 tree ttf_in = TYPE_ARG_TYPES (TREE_TYPE (function));
763 tree ttf = ttf_in;
764 tree tta = tta_in;
765
766 /* Start out with no strikes against. */
767 int evil_strikes = 0;
768 int ellipsis_strikes = 0;
769 int user_strikes = 0;
770 int b_or_d_strikes = 0;
771 int easy_strikes = 0;
772
773 int strike_index = 0, win;
774 struct harshness_code lose;
775 extern int cp_silent;
776
777 #ifdef GATHER_STATISTICS
778 n_compute_conversion_costs++;
779 #endif
780
781 #ifndef DEBUG_MATCHING
782 /* We don't emit any warnings or errors while trying out each candidate. */
783 cp_silent = 1;
784 #endif
785
786 cp->function = function;
787 cp->arg = tta ? TREE_VALUE (tta) : NULL_TREE;
788 cp->u.bad_arg = 0; /* optimistic! */
789
790 cp->h.code = 0;
791 cp->h.distance = 0;
792 cp->h.int_penalty = 0;
793 bzero ((char *) cp->harshness,
794 (cp->h_len + 1) * sizeof (struct harshness_code));
795
796 while (ttf && tta)
797 {
798 struct harshness_code h;
799
800 if (ttf == void_list_node)
801 break;
802
803 if (type_unknown_p (TREE_VALUE (tta)))
804 {
805 /* Must perform some instantiation here. */
806 tree rhs = TREE_VALUE (tta);
807 tree lhstype = TREE_VALUE (ttf);
808
809 /* Keep quiet about possible contravariance violations. */
810 int old_inhibit_warnings = inhibit_warnings;
811 inhibit_warnings = 1;
812
813 /* @@ This is to undo what `grokdeclarator' does to
814 parameter types. It really should go through
815 something more general. */
816
817 TREE_TYPE (tta) = unknown_type_node;
818 rhs = instantiate_type (lhstype, rhs, 0);
819 inhibit_warnings = old_inhibit_warnings;
820
821 if (TREE_CODE (rhs) == ERROR_MARK)
822 h.code = EVIL_CODE;
823 else
824 h = convert_harshness (lhstype, TREE_TYPE (rhs), rhs);
825 }
826 else
827 {
828 #ifdef DEBUG_MATCHING
829 static tree old_function = NULL_TREE;
830
831 if (!old_function || function != old_function)
832 {
833 cp_error ("trying %D", function);
834 old_function = function;
835 }
836
837 cp_error (" doing (%T) %E against arg %T",
838 TREE_TYPE (TREE_VALUE (tta)), TREE_VALUE (tta),
839 TREE_VALUE (ttf));
840 #endif
841
842 h = convert_harshness (TREE_VALUE (ttf),
843 TREE_TYPE (TREE_VALUE (tta)),
844 TREE_VALUE (tta));
845
846 #ifdef DEBUG_MATCHING
847 cp_error (" evaluated %s", print_harshness (&h));
848 #endif
849 }
850
851 cp->harshness[strike_index] = h;
852 if ((h.code & EVIL_CODE)
853 || ((h.code & STD_CODE) && h.distance < 0))
854 {
855 cp->u.bad_arg = strike_index;
856 evil_strikes = 1;
857 }
858 else if (h.code & ELLIPSIS_CODE)
859 ellipsis_strikes += 1;
860 #if 0
861 /* This is never set by `convert_harshness'. */
862 else if (h.code & USER_CODE)
863 {
864 user_strikes += 1;
865 }
866 #endif
867 else
868 {
869 if ((h.code & STD_CODE) && h.distance)
870 {
871 if (h.distance > b_or_d_strikes)
872 b_or_d_strikes = h.distance;
873 }
874 else
875 easy_strikes += (h.code & (STD_CODE|PROMO_CODE|TRIVIAL_CODE));
876 cp->h.code |= h.code;
877 /* Make sure we communicate this. */
878 cp->h.int_penalty += h.int_penalty;
879 }
880
881 ttf = TREE_CHAIN (ttf);
882 tta = TREE_CHAIN (tta);
883 strike_index += 1;
884 }
885
886 if (tta)
887 {
888 /* ran out of formals, and parmlist is fixed size. */
889 if (ttf /* == void_type_node */)
890 {
891 cp->h.code = EVIL_CODE;
892 cp->u.bad_arg = -1;
893 cp_silent = 0;
894 return;
895 }
896 else
897 {
898 struct harshness_code h;
899 int l = list_length (tta);
900 ellipsis_strikes += l;
901 h.code = ELLIPSIS_CODE;
902 h.distance = 0;
903 h.int_penalty = 0;
904 for (; l; --l)
905 cp->harshness[strike_index++] = h;
906 }
907 }
908 else if (ttf && ttf != void_list_node)
909 {
910 /* ran out of actuals, and no defaults. */
911 if (TREE_PURPOSE (ttf) == NULL_TREE)
912 {
913 cp->h.code = EVIL_CODE;
914 cp->u.bad_arg = -2;
915 cp_silent = 0;
916 return;
917 }
918 /* Store index of first default. */
919 cp->harshness[arglen].distance = strike_index+1;
920 }
921 else
922 cp->harshness[arglen].distance = 0;
923
924 /* Argument list lengths work out, so don't need to check them again. */
925 if (evil_strikes)
926 {
927 /* We do not check for derived->base conversions here, since in
928 no case would they give evil strike counts, unless such conversions
929 are somehow ambiguous. */
930
931 /* See if any user-defined conversions apply.
932 But make sure that we do not loop. */
933 static int dont_convert_types = 0;
934
935 if (dont_convert_types)
936 {
937 cp->h.code = EVIL_CODE;
938 cp_silent = 0;
939 return;
940 }
941
942 win = 0; /* Only get one chance to win. */
943 ttf = TYPE_ARG_TYPES (TREE_TYPE (function));
944 tta = tta_in;
945 strike_index = 0;
946 evil_strikes = 0;
947
948 while (ttf && tta)
949 {
950 if (ttf == void_list_node)
951 break;
952
953 lose = cp->harshness[strike_index];
954 if ((lose.code & EVIL_CODE)
955 || ((lose.code & STD_CODE) && lose.distance < 0))
956 {
957 tree actual_type = TREE_TYPE (TREE_VALUE (tta));
958 tree formal_type = TREE_VALUE (ttf);
959 int extra_conversions = 0;
960
961 dont_convert_types = 1;
962
963 if (TREE_CODE (formal_type) == REFERENCE_TYPE)
964 formal_type = TREE_TYPE (formal_type);
965 if (TREE_CODE (actual_type) == REFERENCE_TYPE)
966 actual_type = TREE_TYPE (actual_type);
967
968 if (formal_type != error_mark_node
969 && actual_type != error_mark_node)
970 {
971 formal_type = TYPE_MAIN_VARIANT (formal_type);
972 actual_type = TYPE_MAIN_VARIANT (actual_type);
973
974 if (TYPE_HAS_CONSTRUCTOR (formal_type))
975 {
976 /* If it has a constructor for this type,
977 try to use it. */
978 /* @@ There is no way to save this result yet, so
979 success is a NULL_TREE for now. */
980 if (convert_to_aggr (formal_type, TREE_VALUE (tta), 0, 1)
981 != error_mark_node)
982 win++;
983 }
984 if (TYPE_LANG_SPECIFIC (actual_type)
985 && TYPE_HAS_CONVERSION (actual_type))
986 {
987 int extra = user_harshness (formal_type, actual_type);
988
989 if (extra == EVIL_CODE)
990 win += 2;
991 else if (extra >= 0)
992 {
993 win++;
994 extra_conversions = extra;
995 }
996 }
997 }
998 dont_convert_types = 0;
999
1000 if (win == 1)
1001 {
1002 user_strikes += 1;
1003 cp->harshness[strike_index].code
1004 = USER_CODE | (extra_conversions ? STD_CODE : 0);
1005 win = 0;
1006 }
1007 else
1008 {
1009 if (cp->u.bad_arg > strike_index)
1010 cp->u.bad_arg = strike_index;
1011
1012 evil_strikes = win ? 2 : 1;
1013 break;
1014 }
1015 }
1016
1017 ttf = TREE_CHAIN (ttf);
1018 tta = TREE_CHAIN (tta);
1019 strike_index += 1;
1020 }
1021 }
1022
1023 /* Const member functions get a small penalty because defaulting
1024 to const is less useful than defaulting to non-const. */
1025 /* This is bogus, it does not correspond to anything in the ARM.
1026 This code will be fixed when this entire section is rewritten
1027 to conform to the ARM. (mrs) */
1028 if (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE)
1029 {
1030 tree this_parm = TREE_VALUE (ttf_in);
1031
1032 if (TREE_CODE (this_parm) == RECORD_TYPE /* Is `this' a sig ptr? */
1033 ? TYPE_READONLY (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (this_parm))))
1034 : TYPE_READONLY (TREE_TYPE (this_parm)))
1035 {
1036 cp->harshness[0].code |= TRIVIAL_CODE;
1037 ++easy_strikes;
1038 }
1039 else
1040 {
1041 /* Calling a non-const member function from a const member function
1042 is probably invalid, but for now we let it only draw a warning.
1043 We indicate that such a mismatch has occurred by setting the
1044 harshness to a maximum value. */
1045 if (TREE_CODE (TREE_TYPE (TREE_VALUE (tta_in))) == POINTER_TYPE
1046 && (TYPE_READONLY (TREE_TYPE (TREE_TYPE (TREE_VALUE (tta_in))))))
1047 cp->harshness[0].code |= CONST_CODE;
1048 }
1049 }
1050
1051 if (evil_strikes)
1052 cp->h.code = EVIL_CODE;
1053 if (ellipsis_strikes)
1054 cp->h.code |= ELLIPSIS_CODE;
1055 if (user_strikes)
1056 cp->h.code |= USER_CODE;
1057 cp_silent = 0;
1058 #ifdef DEBUG_MATCHING
1059 cp_error ("final eval %s", print_harshness (&cp->h));
1060 #endif
1061 }
1062
1063 /* Subroutine of ideal_candidate. See if X or Y is a better match
1064 than the other. */
1065 static int
1066 strictly_better (x, y)
1067 unsigned short x, y;
1068 {
1069 unsigned short xor;
1070
1071 if (x == y)
1072 return 0;
1073
1074 xor = x ^ y;
1075 if (xor >= x || xor >= y)
1076 return 1;
1077 return 0;
1078 }
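
/* Worked example for strictly_better: with x = 0x08 and y = 0x0c the two
   differ only below their common high bit, so xor = 0x04 is smaller than
   both and the result is 0; with x = 0x08 and y = 0x18, xor = 0x10 >= x,
   so y carries a strike above anything in x and the result is 1.  */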
1079
1080 /* When one of several possible overloaded functions and/or methods
1081 can be called, choose the best candidate for overloading.
1082
1083    CANDIDATES is the array of candidates we have to choose from.
1084    N_CANDIDATES is the length of CANDIDATES.
1085    LEN is the length of the parameter list. */
1091
1092 static struct candidate *
1093 ideal_candidate (candidates, n_candidates, len)
1094 struct candidate *candidates;
1095 int n_candidates;
1096 int len;
1097 {
1098 struct candidate *cp = candidates+n_candidates;
1099 int i, j = -1, best_code;
1100
1101 /* For each argument, sort the functions from best to worst for the arg.
1102 For each function that's not best for this arg, set its overall
1103 harshness to EVIL so that other args won't like it. The candidate
1104 list for the last argument is the intersection of all the best-liked
1105 functions. */
1106
1107 #if 0
1108 for (i = 0; i < len; i++)
1109 {
1110 qsort (candidates, n_candidates, sizeof (struct candidate),
1111 rank_for_overload);
1112 best_code = cp[-1].h.code;
1113
1114 /* To find out functions that are worse than that represented
1115 by BEST_CODE, we can't just do a comparison like h.code>best_code.
1116 The total harshness for the "best" fn may be 8|8 for two args, and
1117 the harshness for the next-best may be 8|2. If we just compared,
1118 that would be checking 8>10, which would lead to the next-best
1119 being disqualified. What we actually want to do is get rid
1120 of functions that are definitely worse than that represented
1121 by best_code, i.e. those which have bits set higher than the
1122 highest in best_code. Sooooo, what we do is clear out everything
1123 represented by best_code, and see if we still come up with something
1124 higher. If so (e.g., 8|8 vs 8|16), it'll disqualify it properly. */
1125 for (j = n_candidates-2; j >= 0; j--)
1126 if ((candidates[j].h.code & ~best_code) > best_code)
1127 candidates[j].h.code = EVIL_CODE;
1128 }
1129
1130 if (cp[-1].h.code & EVIL_CODE)
1131 return NULL;
1132 #else
1133 qsort (candidates, n_candidates, sizeof (struct candidate),
1134 rank_for_overload);
1135 best_code = cp[-1].h.code;
1136 #endif
1137
1138 /* If they're at least as good as each other, do an arg-by-arg check. */
1139 if (! strictly_better (cp[-1].h.code, cp[-2].h.code))
1140 {
1141 int better = 0;
1142 int worse = 0;
1143
1144 for (j = 0; j < n_candidates; j++)
1145 if (! strictly_better (candidates[j].h.code, best_code))
1146 break;
1147
1148 qsort (candidates+j, n_candidates-j, sizeof (struct candidate),
1149 rank_for_ideal);
1150 for (i = 0; i < len; i++)
1151 {
1152 if (cp[-1].harshness[i].code < cp[-2].harshness[i].code)
1153 better = 1;
1154 else if (cp[-1].harshness[i].code > cp[-2].harshness[i].code)
1155 worse = 1;
1156 else if (cp[-1].harshness[i].code & STD_CODE)
1157 {
1158 /* If it involves a standard conversion, let the
1159 inheritance lattice be the final arbiter. */
1160 if (cp[-1].harshness[i].distance > cp[-2].harshness[i].distance)
1161 worse = 1;
1162 else if (cp[-1].harshness[i].distance < cp[-2].harshness[i].distance)
1163 better = 1;
1164 }
1165 else if (cp[-1].harshness[i].code & PROMO_CODE)
1166 {
1167 /* For integral promotions, take into account a finer
1168 granularity for determining which types should be favored
1169 over others in such promotions. */
1170 if (cp[-1].harshness[i].int_penalty > cp[-2].harshness[i].int_penalty)
1171 worse = 1;
1172 else if (cp[-1].harshness[i].int_penalty < cp[-2].harshness[i].int_penalty)
1173 better = 1;
1174 }
1175 }
1176
1177 if (! better || worse)
1178 return NULL;
1179 }
1180 return cp-1;
1181 }
1182
1183 /* Assume that if the class referred to is not in the
1184    current class hierarchy, it may be remote.
1185 PARENT is assumed to be of aggregate type here. */
1186 static int
1187 may_be_remote (parent)
1188 tree parent;
1189 {
1190 if (TYPE_OVERLOADS_METHOD_CALL_EXPR (parent) == 0)
1191 return 0;
1192
1193 if (current_class_type == NULL_TREE)
1194 return 0;
1195
1196 if (parent == current_class_type)
1197 return 0;
1198
1199 if (UNIQUELY_DERIVED_FROM_P (parent, current_class_type))
1200 return 0;
1201 return 1;
1202 }
1203
1204 tree
1205 build_vfield_ref (datum, type)
1206 tree datum, type;
1207 {
1208 tree rval;
1209 int old_assume_nonnull_objects = flag_assume_nonnull_objects;
1210
1211 if (datum == error_mark_node)
1212 return error_mark_node;
1213
1214 /* Vtable references are always made from non-null objects. */
1215 flag_assume_nonnull_objects = 1;
1216 if (TREE_CODE (TREE_TYPE (datum)) == REFERENCE_TYPE)
1217 datum = convert_from_reference (datum);
1218
1219 if (! TYPE_USES_COMPLEX_INHERITANCE (type))
1220 rval = build (COMPONENT_REF, TREE_TYPE (CLASSTYPE_VFIELD (type)),
1221 datum, CLASSTYPE_VFIELD (type));
1222 else
1223 rval = build_component_ref (datum, DECL_NAME (CLASSTYPE_VFIELD (type)), 0, 0);
1224 flag_assume_nonnull_objects = old_assume_nonnull_objects;
1225
1226 return rval;
1227 }
1228
1229 /* Build a call to a member of an object. I.e., one that overloads
1230 operator ()(), or is a pointer-to-function or pointer-to-method. */
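/* Illustrative example of what the function below handles (the class and
   member names are hypothetical):

       struct S { int (*fp) (int); Functor call; };
       s.fp (3);     // member that is a pointer to function
       s.call (3);   // member whose class overloads operator()
*/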
1231 static tree
1232 build_field_call (basetype_path, instance_ptr, name, parms)
1233 tree basetype_path, instance_ptr, name, parms;
1234 {
1235 tree field, instance;
1236
1237 if (instance_ptr == current_class_decl)
1238 {
1239 /* Check to see if we really have a reference to an instance variable
1240 with `operator()()' overloaded. */
1241 field = IDENTIFIER_CLASS_VALUE (name);
1242
1243 if (field == NULL_TREE)
1244 {
1245 cp_error ("`this' has no member named `%D'", name);
1246 return error_mark_node;
1247 }
1248
1249 if (TREE_CODE (field) == FIELD_DECL)
1250 {
1251 /* If it's a field, try overloading operator (),
1252 or calling if the field is a pointer-to-function. */
1253 instance = build_component_ref_1 (C_C_D, field, 0);
1254 if (instance == error_mark_node)
1255 return error_mark_node;
1256
1257 if (TYPE_LANG_SPECIFIC (TREE_TYPE (instance))
1258 && TYPE_OVERLOADS_CALL_EXPR (TREE_TYPE (instance)))
1259 return build_opfncall (CALL_EXPR, LOOKUP_NORMAL, instance, parms, NULL_TREE);
1260
1261 if (TREE_CODE (TREE_TYPE (instance)) == POINTER_TYPE)
1262 {
1263 if (TREE_CODE (TREE_TYPE (TREE_TYPE (instance))) == FUNCTION_TYPE)
1264 return build_function_call (instance, parms);
1265 else if (TREE_CODE (TREE_TYPE (TREE_TYPE (instance))) == METHOD_TYPE)
1266 return build_function_call (instance, tree_cons (NULL_TREE, current_class_decl, parms));
1267 }
1268 }
1269 return NULL_TREE;
1270 }
1271
1272 /* Check to see if this is not really a reference to an instance variable
1273 with `operator()()' overloaded. */
1274 field = lookup_field (basetype_path, name, 1, 0);
1275
1276 /* This can happen if the reference was ambiguous or for access
1277 violations. */
1278 if (field == error_mark_node)
1279 return error_mark_node;
1280
1281 if (field)
1282 {
1283 tree basetype;
1284 tree ftype = TREE_TYPE (field);
1285
1286 if (TREE_CODE (ftype) == REFERENCE_TYPE)
1287 ftype = TREE_TYPE (ftype);
1288
1289 if (TYPE_LANG_SPECIFIC (ftype) && TYPE_OVERLOADS_CALL_EXPR (ftype))
1290 {
1291 /* Make the next search for this field very short. */
1292 basetype = DECL_FIELD_CONTEXT (field);
1293 instance_ptr = convert_pointer_to (basetype, instance_ptr);
1294
1295 instance = build_indirect_ref (instance_ptr, NULL_PTR);
1296 return build_opfncall (CALL_EXPR, LOOKUP_NORMAL,
1297 build_component_ref_1 (instance, field, 0),
1298 parms, NULL_TREE);
1299 }
1300 if (TREE_CODE (ftype) == POINTER_TYPE)
1301 {
1302 if (TREE_CODE (TREE_TYPE (ftype)) == FUNCTION_TYPE
1303 || TREE_CODE (TREE_TYPE (ftype)) == METHOD_TYPE)
1304 {
1305 /* This is a member which is a pointer to function. */
1306 tree ref
1307 = build_component_ref_1 (build_indirect_ref (instance_ptr,
1308 NULL_PTR),
1309 field, LOOKUP_COMPLAIN);
1310 if (ref == error_mark_node)
1311 return error_mark_node;
1312 return build_function_call (ref, parms);
1313 }
1314 }
1315 else if (TREE_CODE (ftype) == METHOD_TYPE)
1316 {
1317 error ("invalid call via pointer-to-member function");
1318 return error_mark_node;
1319 }
1320 else
1321 return NULL_TREE;
1322 }
1323 return NULL_TREE;
1324 }
1325
1326 tree
1327 find_scoped_type (type, inner_name, inner_types)
1328 tree type, inner_name, inner_types;
1329 {
1330 tree tags = CLASSTYPE_TAGS (type);
1331
1332 while (tags)
1333 {
1334 /* The TREE_PURPOSE of an enum tag (which becomes a member of the
1335 enclosing class) is set to the name for the enum type. So, if
1336 inner_name is `bar', and we strike `baz' for `enum bar { baz }',
1337 then this test will be true. */
1338 if (TREE_PURPOSE (tags) == inner_name)
1339 {
1340 if (inner_types == NULL_TREE)
1341 return DECL_NESTED_TYPENAME (TYPE_NAME (TREE_VALUE (tags)));
1342 return resolve_scope_to_name (TREE_VALUE (tags), inner_types);
1343 }
1344 tags = TREE_CHAIN (tags);
1345 }
1346
1347 /* Look for a TYPE_DECL. */
1348 for (tags = TYPE_FIELDS (type); tags; tags = TREE_CHAIN (tags))
1349 if (TREE_CODE (tags) == TYPE_DECL && DECL_NAME (tags) == inner_name)
1350 {
1351 /* Code by raeburn. */
1352 if (inner_types == NULL_TREE)
1353 return DECL_NESTED_TYPENAME (tags);
1354 return resolve_scope_to_name (TREE_TYPE (tags), inner_types);
1355 }
1356
1357 return NULL_TREE;
1358 }
1359
1360 /* Resolve an expression NAME1::NAME2::...::NAMEn to
1361 the name that names the above nested type. INNER_TYPES
1362 is a chain of nested type names (held together by SCOPE_REFs);
1363 OUTER_TYPE is the type we know to enclose INNER_TYPES.
1364 Returns NULL_TREE if there is an error. */
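/* Illustrative example (hypothetical names): for a use such as `A::B::c',
   OUTER_TYPE would be the type for `A' and INNER_STUFF the SCOPE_REF chain
   naming `B::c'.  */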
1365 tree
1366 resolve_scope_to_name (outer_type, inner_stuff)
1367 tree outer_type, inner_stuff;
1368 {
1369 register tree tmp;
1370 tree inner_name, inner_type;
1371
1372 if (outer_type == NULL_TREE && current_class_type != NULL_TREE)
1373 {
1374 /* We first try to look for a nesting in our current class context,
1375 then try any enclosing classes. */
1376 tree type = current_class_type;
1377
1378 while (type && (TREE_CODE (type) == RECORD_TYPE
1379 || TREE_CODE (type) == UNION_TYPE))
1380 {
1381 tree rval = resolve_scope_to_name (type, inner_stuff);
1382
1383 if (rval != NULL_TREE)
1384 return rval;
1385 type = DECL_CONTEXT (TYPE_NAME (type));
1386 }
1387 }
1388
1389 if (TREE_CODE (inner_stuff) == SCOPE_REF)
1390 {
1391 inner_name = TREE_OPERAND (inner_stuff, 0);
1392 inner_type = TREE_OPERAND (inner_stuff, 1);
1393 }
1394 else
1395 {
1396 inner_name = inner_stuff;
1397 inner_type = NULL_TREE;
1398 }
1399
1400 if (outer_type == NULL_TREE)
1401 {
1402 tree x;
1403 /* If we have something that's already a type by itself,
1404 use that. */
1405 if (IDENTIFIER_HAS_TYPE_VALUE (inner_name))
1406 {
1407 if (inner_type)
1408 return resolve_scope_to_name (IDENTIFIER_TYPE_VALUE (inner_name),
1409 inner_type);
1410 return inner_name;
1411 }
1412
1413 x = lookup_name (inner_name, 0);
1414
1415 if (x && TREE_CODE (x) == NAMESPACE_DECL)
1416 {
1417 x = lookup_namespace_name (x, inner_type);
1418 return x;
1419 }
1420 return NULL_TREE;
1421 }
1422
1423 if (! IS_AGGR_TYPE (outer_type))
1424 return NULL_TREE;
1425
1426 /* Look for member classes or enums. */
1427 tmp = find_scoped_type (outer_type, inner_name, inner_type);
1428
1429 /* If it's not a type in this class, then go down into the
1430 base classes and search there. */
1431 if (! tmp && TYPE_BINFO (outer_type))
1432 {
1433 tree binfos = TYPE_BINFO_BASETYPES (outer_type);
1434 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
1435
1436 for (i = 0; i < n_baselinks; i++)
1437 {
1438 tree base_binfo = TREE_VEC_ELT (binfos, i);
1439 tmp = resolve_scope_to_name (BINFO_TYPE (base_binfo), inner_stuff);
1440 if (tmp)
1441 return tmp;
1442 }
1443 tmp = NULL_TREE;
1444 }
1445
1446 return tmp;
1447 }
1448
1449 /* Build a method call of the form `EXP->SCOPES::NAME (PARMS)'.
1450 This is how virtual function calls are avoided. */
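/* Illustrative example (hypothetical names): a call written as
   `p->Base::f (args)' arrives here with EXP the object expression,
   BASETYPE the type for `Base', and NAME `f'; naming the base explicitly
   is what lets the call bypass the vtable -- note the LOOKUP_NONVIRTUAL
   flag passed to build_method_call at the end of this function.  */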
1451 tree
1452 build_scoped_method_call (exp, basetype, name, parms)
1453 tree exp, basetype, name, parms;
1454 {
1455 /* Because this syntactic form does not allow
1456 a pointer to a base class to be `stolen',
1457 we need not protect the derived->base conversion
1458 that happens here.
1459
1460 @@ But we do have to check access privileges later. */
1461 tree binfo, decl;
1462 tree type = TREE_TYPE (exp);
1463
1464 if (type == error_mark_node
1465 || basetype == error_mark_node)
1466 return error_mark_node;
1467
1468 if (current_template_parms)
1469 {
1470 if (TREE_CODE (name) == BIT_NOT_EXPR)
1471 {
1472 tree type = get_aggr_from_typedef (TREE_OPERAND (name, 0), 1);
1473 name = build_min_nt (BIT_NOT_EXPR, type);
1474 }
1475 name = build_min_nt (SCOPE_REF, basetype, name);
1476 return build_min_nt (METHOD_CALL_EXPR, name, exp, parms, 0);
1477 }
1478
1479 if (TREE_CODE (type) == REFERENCE_TYPE)
1480 type = TREE_TYPE (type);
1481
1482 	  /* Destructors can be "called" for simple types; see 5.2.4 and 12.4.  Note
1483 that explicit ~int is caught in the parser; this deals with typedefs
1484 and template parms. */
1485 if (TREE_CODE (name) == BIT_NOT_EXPR && ! IS_AGGR_TYPE (basetype))
1486 {
1487 if (type != basetype)
1488 cp_error ("type of `%E' does not match destructor type `%T' (type was `%T')",
1489 exp, basetype, type);
1490 name = TREE_OPERAND (name, 0);
1491 if (basetype != name && basetype != get_type_value (name))
1492 cp_error ("qualified type `%T' does not match destructor name `~%T'",
1493 basetype, name);
1494 return convert (void_type_node, exp);
1495 }
1496
1497 if (! is_aggr_type (basetype, 1))
1498 return error_mark_node;
1499
1500 if (! IS_AGGR_TYPE (type))
1501 {
1502 cp_error ("base object `%E' of scoped method call is of non-aggregate type `%T'",
1503 exp, type);
1504 return error_mark_node;
1505 }
1506
1507 if ((binfo = binfo_or_else (basetype, type)))
1508 {
1509 if (binfo == error_mark_node)
1510 return error_mark_node;
1511 if (TREE_CODE (exp) == INDIRECT_REF)
1512 decl = build_indirect_ref (convert_pointer_to (binfo,
1513 build_unary_op (ADDR_EXPR, exp, 0)), NULL_PTR);
1514 else
1515 decl = build_scoped_ref (exp, basetype);
1516
1517 /* Call to a destructor. */
1518 if (TREE_CODE (name) == BIT_NOT_EXPR)
1519 {
1520 /* Explicit call to destructor. */
1521 name = TREE_OPERAND (name, 0);
1522 if (! (name == TYPE_MAIN_VARIANT (TREE_TYPE (decl))
1523 || name == constructor_name (TREE_TYPE (decl))
1524 || TREE_TYPE (decl) == get_type_value (name)))
1525 {
1526 cp_error
1527 ("qualified type `%T' does not match destructor name `~%T'",
1528 TREE_TYPE (decl), name);
1529 return error_mark_node;
1530 }
1531 if (! TYPE_HAS_DESTRUCTOR (TREE_TYPE (decl)))
1532 return convert (void_type_node, exp);
1533
1534 return build_delete (TREE_TYPE (decl), decl, integer_two_node,
1535 LOOKUP_NORMAL|LOOKUP_NONVIRTUAL|LOOKUP_DESTRUCTOR,
1536 0);
1537 }
1538
1539 /* Call to a method. */
1540 return build_method_call (decl, name, parms, binfo,
1541 LOOKUP_NORMAL|LOOKUP_NONVIRTUAL);
1542 }
1543 return error_mark_node;
1544 }
1545
1546 static void
1547 print_candidates (candidates)
1548 tree candidates;
1549 {
1550 cp_error_at ("candidates are: %D", TREE_VALUE (candidates));
1551 candidates = TREE_CHAIN (candidates);
1552
1553 while (candidates)
1554 {
1555 cp_error_at (" %D", TREE_VALUE (candidates));
1556 candidates = TREE_CHAIN (candidates);
1557 }
1558 }
1559
1560 static void
1561 print_n_candidates (candidates, n)
1562 struct candidate *candidates;
1563 int n;
1564 {
1565 int i;
1566
1567 cp_error_at ("candidates are: %D", candidates[0].function);
1568 for (i = 1; i < n; i++)
1569 cp_error_at (" %D", candidates[i].function);
1570 }
1571
1572 /* Build something of the form ptr->method (args)
1573 or object.method (args). This can also build
1574 calls to constructors, and find friends.
1575
1576 Member functions always take their class variable
1577 as a pointer.
1578
1579 INSTANCE is a class instance.
1580
1581 NAME is the name of the method desired, usually an IDENTIFIER_NODE.
1582
1583 PARMS help to figure out what that NAME really refers to.
1584
1585 BASETYPE_PATH, if non-NULL, contains a chain from the type of INSTANCE
1586 down to the real instance type to use for access checking. We need this
1587 information to get protected accesses correct. This parameter is used
1588 by build_member_call.
1589
1590 FLAGS is the logical disjunction of zero or more LOOKUP_
1591 flags. See cp-tree.h for more info.
1592
1593 If this is all OK, calls build_function_call with the resolved
1594 member function.
1595
1596 This function must also handle being called to perform
1597 initialization, promotion/coercion of arguments, and
1598 instantiation of default parameters.
1599
1600 Note that NAME may refer to an instance variable name. If
1601 `operator()()' is defined for the type of that field, then we return
1602 that result. */
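/* Illustrative example (hypothetical names): for a source-level call
   `obj.f (a, b)', INSTANCE is the tree for `obj', NAME the identifier `f',
   and PARMS the TREE_LIST of the arguments `a' and `b'.  A constructor
   call reaches here with a type name in NAME and a null INSTANCE.  */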
1603 tree
1604 build_method_call (instance, name, parms, basetype_path, flags)
1605 tree instance, name, parms, basetype_path;
1606 int flags;
1607 {
1608 register tree function, fntype, value_type;
1609 register tree basetype, save_basetype;
1610 register tree baselink, result, parmtypes, parm;
1611 #if 0
1612 register tree method_name;
1613 #endif
1614 tree last;
1615 int pass;
1616 tree access = access_public_node;
1617
1618 /* Range of cases for vtable optimization. */
1619 enum vtable_needs { not_needed, maybe_needed, unneeded, needed };
1620 enum vtable_needs need_vtbl = not_needed;
1621
1622 char *name_kind;
1623 int ever_seen = 0;
1624 tree instance_ptr = NULL_TREE;
1625 int all_virtual = flag_all_virtual;
1626 int static_call_context = 0;
1627 tree found_fns = NULL_TREE;
1628
1629 /* Keep track of `const' and `volatile' objects. */
1630 int constp, volatilep;
1631
1632 #ifdef GATHER_STATISTICS
1633 n_build_method_call++;
1634 #endif
1635
1636 if (instance == error_mark_node
1637 || name == error_mark_node
1638 || parms == error_mark_node
1639 || (instance != NULL_TREE && TREE_TYPE (instance) == error_mark_node))
1640 return error_mark_node;
1641
1642 if (current_template_parms)
1643 {
1644 if (TREE_CODE (name) == BIT_NOT_EXPR)
1645 {
1646 tree type = get_aggr_from_typedef (TREE_OPERAND (name, 0), 1);
1647 name = build_min_nt (BIT_NOT_EXPR, type);
1648 }
1649
1650 return build_min_nt (METHOD_CALL_EXPR, name, instance, parms, 0);
1651 }
1652
1653 /* This is the logic that magically deletes the second argument to
1654 operator delete, if it is not needed. */
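  /* Illustrative note: the probe below first retries the call with the
     second (size) argument dropped and LOOKUP_SPECULATIVELY set; only if a
     one-argument `operator delete' is found is the real call built that
     way, otherwise the saved second argument is put back.  */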
1655 if (name == ansi_opname[(int) DELETE_EXPR] && list_length (parms)==2)
1656 {
1657 tree save_last = TREE_CHAIN (parms);
1658 tree result;
1659 /* get rid of unneeded argument */
1660 TREE_CHAIN (parms) = NULL_TREE;
1661 result = build_method_call (instance, name, parms, basetype_path,
1662 (LOOKUP_SPECULATIVELY|flags)
1663 &~LOOKUP_COMPLAIN);
1664 /* If it finds a match, return it. */
1665 if (result)
1666 return build_method_call (instance, name, parms, basetype_path, flags);
1667 	      /* If it doesn't work, the two-argument delete must work.  */
1668 TREE_CHAIN (parms) = save_last;
1669 }
1670 /* We already know whether it's needed or not for vec delete. */
1671 else if (name == ansi_opname[(int) VEC_DELETE_EXPR]
1672 && TYPE_LANG_SPECIFIC (TREE_TYPE (instance))
1673 && ! TYPE_VEC_DELETE_TAKES_SIZE (TREE_TYPE (instance)))
1674 TREE_CHAIN (parms) = NULL_TREE;
1675
1676 if (TREE_CODE (name) == BIT_NOT_EXPR)
1677 {
1678 flags |= LOOKUP_DESTRUCTOR;
1679 name = TREE_OPERAND (name, 0);
1680 if (parms)
1681 error ("destructors take no parameters");
1682 basetype = TREE_TYPE (instance);
1683 if (TREE_CODE (basetype) == REFERENCE_TYPE)
1684 basetype = TREE_TYPE (basetype);
1685 if (! (name == basetype
1686 || (IS_AGGR_TYPE (basetype)
1687 && name == constructor_name (basetype))
1688 || basetype == get_type_value (name)))
1689 {
1690 cp_error ("destructor name `~%D' does not match type `%T' of expression",
1691 name, basetype);
1692 return convert (void_type_node, instance);
1693 }
1694
1695 if (! TYPE_HAS_DESTRUCTOR (basetype))
1696 return convert (void_type_node, instance);
1697 instance = default_conversion (instance);
1698 instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
1699 return build_delete (build_pointer_type (basetype),
1700 instance_ptr, integer_two_node,
1701 LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 0);
1702 }
1703
1704 {
1705 char *xref_name;
1706
1707 /* Initialize name for error reporting. */
1708 if (IDENTIFIER_OPNAME_P (name) && ! IDENTIFIER_TYPENAME_P (name))
1709 {
1710 char *p = operator_name_string (name);
1711 xref_name = (char *)alloca (strlen (p) + 10);
1712 sprintf (xref_name, "operator %s", p);
1713 }
1714 else if (TREE_CODE (name) == SCOPE_REF)
1715 xref_name = IDENTIFIER_POINTER (TREE_OPERAND (name, 1));
1716 else
1717 xref_name = IDENTIFIER_POINTER (name);
1718
1719 GNU_xref_call (current_function_decl, xref_name);
1720 }
1721
1722 if (instance == NULL_TREE)
1723 {
1724 basetype = NULL_TREE;
1725 /* Check cases where this is really a call to raise
1726 an exception. */
1727 if (current_class_type && TREE_CODE (name) == IDENTIFIER_NODE)
1728 {
1729 basetype = purpose_member (name, CLASSTYPE_TAGS (current_class_type));
1730 if (basetype)
1731 basetype = TREE_VALUE (basetype);
1732 }
1733 else if (TREE_CODE (name) == SCOPE_REF
1734 && TREE_CODE (TREE_OPERAND (name, 0)) == IDENTIFIER_NODE)
1735 {
1736 if (! is_aggr_typedef (TREE_OPERAND (name, 0), 1))
1737 return error_mark_node;
1738 basetype = purpose_member (TREE_OPERAND (name, 1),
1739 CLASSTYPE_TAGS (IDENTIFIER_TYPE_VALUE (TREE_OPERAND (name, 0))));
1740 if (basetype)
1741 basetype = TREE_VALUE (basetype);
1742 }
1743
1744 if (basetype != NULL_TREE)
1745 ;
1746 /* call to a constructor... */
1747 else if (basetype_path)
1748 basetype = BINFO_TYPE (basetype_path);
1749 else if (IDENTIFIER_HAS_TYPE_VALUE (name))
1750 {
1751 basetype = IDENTIFIER_TYPE_VALUE (name);
1752 name = constructor_name (basetype);
1753 }
1754 else
1755 {
1756 tree typedef_name = lookup_name (name, 1);
1757 if (typedef_name && TREE_CODE (typedef_name) == TYPE_DECL)
1758 {
1759 /* Canonicalize the typedef name. */
1760 basetype = TREE_TYPE (typedef_name);
1761 name = TYPE_IDENTIFIER (basetype);
1762 }
1763 else
1764 {
1765 cp_error ("no constructor named `%T' in scope",
1766 name);
1767 return error_mark_node;
1768 }
1769 }
1770
1771 if (! IS_AGGR_TYPE (basetype))
1772 {
1773 non_aggr_error:
1774 if ((flags & LOOKUP_COMPLAIN) && TREE_CODE (basetype) != ERROR_MARK)
1775 cp_error ("request for member `%D' in `%E', which is of non-aggregate type `%T'",
1776 name, instance, basetype);
1777
1778 return error_mark_node;
1779 }
1780 }
1781 else if (instance == C_C_D || instance == current_class_decl)
1782 {
1783 /* When doing initialization, we side-effect the TREE_TYPE of
1784 C_C_D, hence we cannot set up BASETYPE from CURRENT_CLASS_TYPE. */
1785 basetype = TREE_TYPE (C_C_D);
1786
1787 /* Anything manifestly `this' in constructors and destructors
1788 has a known type, so virtual function tables are not needed. */
1789 if (TYPE_VIRTUAL_P (basetype)
1790 && !(flags & LOOKUP_NONVIRTUAL))
1791 need_vtbl = (dtor_label || ctor_label)
1792 ? unneeded : maybe_needed;
1793
1794 /* If `this' is a signature pointer and `name' is not a constructor,
1795 we are calling a signature member function. In that case, set the
1796 `basetype' to the signature type and dereference the `optr' field. */
1797 if (IS_SIGNATURE_POINTER (basetype)
1798 && TYPE_IDENTIFIER (basetype) != name)
1799 {
1800 basetype = SIGNATURE_TYPE (basetype);
1801 instance_ptr = build_optr_ref (instance);
1802 instance_ptr = convert (build_pointer_type (basetype), instance_ptr);
1803 basetype_path = TYPE_BINFO (basetype);
1804 }
1805 else
1806 {
1807 instance = C_C_D;
1808 instance_ptr = current_class_decl;
1809 basetype_path = TYPE_BINFO (current_class_type);
1810 }
1811 result = build_field_call (basetype_path, instance_ptr, name, parms);
1812
1813 if (result)
1814 return result;
1815 }
1816 else if (TREE_CODE (instance) == RESULT_DECL)
1817 {
1818 basetype = TREE_TYPE (instance);
1819 /* Should we ever have to make a virtual function reference
1820 from a RESULT_DECL, know that it must be of fixed type
1821 within the scope of this function. */
1822 if (!(flags & LOOKUP_NONVIRTUAL) && TYPE_VIRTUAL_P (basetype))
1823 need_vtbl = maybe_needed;
1824 instance_ptr = build1 (ADDR_EXPR, build_pointer_type (basetype), instance);
1825 }
1826 else
1827 {
1828 /* The MAIN_VARIANT of the type that `instance_ptr' winds up being. */
1829 tree inst_ptr_basetype;
1830
1831 static_call_context =
1832 (TREE_CODE (instance) == INDIRECT_REF
1833 && TREE_CODE (TREE_OPERAND (instance, 0)) == NOP_EXPR
1834 && TREE_OPERAND (TREE_OPERAND (instance, 0), 0) == error_mark_node);
1835
1836 if (TREE_CODE (instance) == OFFSET_REF)
1837 instance = resolve_offset_ref (instance);
1838
1839 /* the base type of an instance variable is pointer to class */
1840 basetype = TREE_TYPE (instance);
1841
1842 if (TREE_CODE (basetype) == REFERENCE_TYPE)
1843 {
1844 basetype = TREE_TYPE (basetype);
1845 if (! IS_AGGR_TYPE (basetype))
1846 goto non_aggr_error;
1847 /* Call to convert not needed because we are remaining
1848 within the same type. */
1849 instance_ptr = build1 (NOP_EXPR, build_pointer_type (basetype),
1850 instance);
1851 inst_ptr_basetype = TYPE_MAIN_VARIANT (basetype);
1852 }
1853 else
1854 {
1855 if (! IS_AGGR_TYPE (basetype)
1856 && ! (TYPE_LANG_SPECIFIC (basetype)
1857 && (IS_SIGNATURE_POINTER (basetype)
1858 || IS_SIGNATURE_REFERENCE (basetype))))
1859 goto non_aggr_error;
1860
1861 /* If `instance' is a signature pointer/reference and `name' is
1862 not a constructor, we are calling a signature member function.
1863 In that case set the `basetype' to the signature type. */
1864 if ((IS_SIGNATURE_POINTER (basetype)
1865 || IS_SIGNATURE_REFERENCE (basetype))
1866 && TYPE_IDENTIFIER (basetype) != name)
1867 basetype = SIGNATURE_TYPE (basetype);
1868
1869 if ((IS_SIGNATURE (basetype)
1870 && (instance_ptr = instance))
1871 || (lvalue_p (instance)
1872 && (instance_ptr = build_unary_op (ADDR_EXPR, instance, 0)))
1873 || (instance_ptr = unary_complex_lvalue (ADDR_EXPR, instance)))
1874 {
1875 if (instance_ptr == error_mark_node)
1876 return error_mark_node;
1877 }
1878 else if (TREE_CODE (instance) == NOP_EXPR
1879 || TREE_CODE (instance) == CONSTRUCTOR)
1880 {
1881 /* A cast is not an lvalue. Initialize a fresh temp
1882 with the value we are casting from, and proceed with
1883 that temporary. We can't cast to a reference type,
1884 so that simplifies the initialization to something
1885 we can manage. */
1886 tree temp = get_temp_name (TREE_TYPE (instance), 0);
1887 if (IS_AGGR_TYPE (TREE_TYPE (instance)))
1888 expand_aggr_init (temp, instance, 0, flags);
1889 else
1890 {
1891 store_init_value (temp, instance);
1892 expand_decl_init (temp);
1893 }
1894 instance = temp;
1895 instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
1896 }
1897 else
1898 {
1899 if (TREE_CODE (instance) != CALL_EXPR)
1900 my_friendly_abort (125);
1901 if (TYPE_NEEDS_CONSTRUCTING (basetype))
1902 instance = build_cplus_new (basetype, instance);
1903 else
1904 {
1905 instance = get_temp_name (basetype, 0);
1906 TREE_ADDRESSABLE (instance) = 1;
1907 }
1908 instance_ptr = build_unary_op (ADDR_EXPR, instance, 0);
1909 }
1910 /* @@ Should we call comp_target_types here? */
1911 if (IS_SIGNATURE (basetype))
1912 inst_ptr_basetype = basetype;
1913 else
1914 inst_ptr_basetype = TREE_TYPE (TREE_TYPE (instance_ptr));
1915 if (TYPE_MAIN_VARIANT (basetype) == TYPE_MAIN_VARIANT (inst_ptr_basetype))
1916 basetype = inst_ptr_basetype;
1917 else
1918 {
1919 instance_ptr = convert (build_pointer_type (basetype), instance_ptr);
1920 if (instance_ptr == error_mark_node)
1921 return error_mark_node;
1922 }
1923 }
1924
1925 /* After converting `instance_ptr' above, `inst_ptr_basetype' was
1926 not updated, so we use `basetype' instead. */
1927 if (basetype_path == NULL_TREE
1928 && IS_SIGNATURE (basetype))
1929 basetype_path = TYPE_BINFO (basetype);
1930 else if (basetype_path == NULL_TREE ||
1931 BINFO_TYPE (basetype_path) != TYPE_MAIN_VARIANT (inst_ptr_basetype))
1932 basetype_path = TYPE_BINFO (inst_ptr_basetype);
1933
1934 result = build_field_call (basetype_path, instance_ptr, name, parms);
1935 if (result)
1936 return result;
1937
1938 if (!(flags & LOOKUP_NONVIRTUAL) && TYPE_VIRTUAL_P (basetype))
1939 {
1940 if (TREE_SIDE_EFFECTS (instance_ptr))
1941 {
1942 /* This action is needed because the instance is needed
1943 for providing the base of the virtual function table.
1944 Without using a SAVE_EXPR, the function we are building
1945 may be called twice, or side effects on the instance
1946 variable (such as a post-increment), may happen twice. */
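              /* (For example, in a hypothetical call `(p++)->virt ()' the
                 instance pointer `p++' is needed both to fetch the vtable
                 and as the `this' argument; without the SAVE_EXPR the
                 post-increment could be expanded twice.)  */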
1947 instance_ptr = save_expr (instance_ptr);
1948 instance = build_indirect_ref (instance_ptr, NULL_PTR);
1949 }
1950 else if (TREE_CODE (TREE_TYPE (instance)) == POINTER_TYPE)
1951 {
1952 /* This happens when called for operator new (). */
1953 instance = build_indirect_ref (instance, NULL_PTR);
1954 }
1955
1956 need_vtbl = maybe_needed;
1957 }
1958 }
1959
1960 if (TYPE_SIZE (complete_type (basetype)) == 0)
1961 {
1962 /* This is worth complaining about, I think. */
1963 cp_error ("cannot lookup method in incomplete type `%T'", basetype);
1964 return error_mark_node;
1965 }
1966
1967 save_basetype = TYPE_MAIN_VARIANT (basetype);
1968
1969 #if 0
1970 if (all_virtual == 1
1971 && (! strncmp (IDENTIFIER_POINTER (name), OPERATOR_METHOD_FORMAT,
1972 OPERATOR_METHOD_LENGTH)
1973 || instance_ptr == NULL_TREE
1974 || (TYPE_OVERLOADS_METHOD_CALL_EXPR (basetype) == 0)))
1975 all_virtual = 0;
1976 #endif
1977
1978 last = NULL_TREE;
1979 for (parmtypes = NULL_TREE, parm = parms; parm; parm = TREE_CHAIN (parm))
1980 {
1981 tree t = TREE_TYPE (TREE_VALUE (parm));
1982 if (TREE_CODE (t) == OFFSET_TYPE)
1983 {
1984 /* Convert OFFSET_TYPE entities to their normal selves. */
1985 TREE_VALUE (parm) = resolve_offset_ref (TREE_VALUE (parm));
1986 t = TREE_TYPE (TREE_VALUE (parm));
1987 }
1988 if (TREE_CODE (TREE_VALUE (parm)) == OFFSET_REF
1989 && TREE_CODE (t) == METHOD_TYPE)
1990 {
1991 TREE_VALUE (parm) = build_unary_op (ADDR_EXPR, TREE_VALUE (parm), 0);
1992 }
1993 #if 0
1994 /* This breaks reference-to-array parameters. */
1995 if (TREE_CODE (t) == ARRAY_TYPE)
1996 {
1997 /* Perform the conversion from ARRAY_TYPE to POINTER_TYPE in place.
1998 This eliminates needless calls to `compute_conversion_costs'. */
1999 TREE_VALUE (parm) = default_conversion (TREE_VALUE (parm));
2000 t = TREE_TYPE (TREE_VALUE (parm));
2001 }
2002 #endif
2003 if (t == error_mark_node)
2004 return error_mark_node;
2005 last = build_tree_list (NULL_TREE, t);
2006 parmtypes = chainon (parmtypes, last);
2007 }
2008
2009 if (instance && IS_SIGNATURE (basetype))
2010 {
2011 /* @@ Should this be the constp/volatilep flags for the optr field
2012 of the signature pointer? */
2013 constp = TYPE_READONLY (basetype);
2014 volatilep = TYPE_VOLATILE (basetype);
2015 parms = tree_cons (NULL_TREE, instance_ptr, parms);
2016 }
2017 else if (instance)
2018 {
2019 /* TREE_READONLY (instance) fails for references. */
2020 constp = TYPE_READONLY (TREE_TYPE (TREE_TYPE (instance_ptr)));
2021 volatilep = TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (instance_ptr)));
2022 parms = tree_cons (NULL_TREE, instance_ptr, parms);
2023 }
2024 else
2025 {
2026 /* Raw constructors are always in charge. */
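          /* (Roughly: when a class has virtual base classes its constructors
             take an extra integer "in charge" argument; the nonzero value
             passed below means this constructor also initializes the virtual
             bases, while the zero passed further down for initialization-only
             calls marks a constructor that is not in charge.)  */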
2027 if (TYPE_USES_VIRTUAL_BASECLASSES (basetype)
2028 && ! (flags & LOOKUP_HAS_IN_CHARGE))
2029 {
2030 flags |= LOOKUP_HAS_IN_CHARGE;
2031 parms = tree_cons (NULL_TREE, integer_one_node, parms);
2032 parmtypes = tree_cons (NULL_TREE, integer_type_node, parmtypes);
2033 }
2034
2035 constp = 0;
2036 volatilep = 0;
2037 instance_ptr = build_int_2 (0, 0);
2038 TREE_TYPE (instance_ptr) = build_pointer_type (basetype);
2039 parms = tree_cons (NULL_TREE, instance_ptr, parms);
2040 }
2041
2042 parmtypes = tree_cons (NULL_TREE, TREE_TYPE (instance_ptr), parmtypes);
2043
2044 if (last == NULL_TREE)
2045 last = parmtypes;
2046
2047 /* Look up function name in the structure type definition. */
2048
2049 if ((IDENTIFIER_HAS_TYPE_VALUE (name)
2050 && ! IDENTIFIER_OPNAME_P (name)
2051 && IS_AGGR_TYPE (IDENTIFIER_TYPE_VALUE (name)))
2052 || name == constructor_name (basetype))
2053 {
2054 tree tmp = NULL_TREE;
2055 if (IDENTIFIER_TYPE_VALUE (name) == basetype
2056 || name == constructor_name (basetype))
2057 tmp = TYPE_BINFO (basetype);
2058 else
2059 tmp = get_binfo (IDENTIFIER_TYPE_VALUE (name), basetype, 0);
2060
2061 if (tmp != NULL_TREE)
2062 {
2063 name_kind = "constructor";
2064
2065 if (TYPE_USES_VIRTUAL_BASECLASSES (basetype)
2066 && ! (flags & LOOKUP_HAS_IN_CHARGE))
2067 {
2068 /* Constructors called for initialization
2069 only are never in charge. */
2070 tree tmplist;
2071
2072 flags |= LOOKUP_HAS_IN_CHARGE;
2073 tmplist = tree_cons (NULL_TREE, integer_zero_node,
2074 TREE_CHAIN (parms));
2075 TREE_CHAIN (parms) = tmplist;
2076 tmplist = tree_cons (NULL_TREE, integer_type_node, TREE_CHAIN (parmtypes));
2077 TREE_CHAIN (parmtypes) = tmplist;
2078 }
2079 basetype = BINFO_TYPE (tmp);
2080 }
2081 else
2082 name_kind = "method";
2083 }
2084 else
2085 name_kind = "method";
2086
2087 if (basetype_path == NULL_TREE
2088 || BINFO_TYPE (basetype_path) != TYPE_MAIN_VARIANT (basetype))
2089 basetype_path = TYPE_BINFO (basetype);
2090 result = lookup_fnfields (basetype_path, name,
2091 (flags & LOOKUP_COMPLAIN));
2092 if (result == error_mark_node)
2093 return error_mark_node;
2094
2095
2096 #if 0
2097 /* Now, go look for this method name. We do not find destructors here.
2098
2099 Putting `void_list_node' on the end of the parmtypes
2100 fakes out `build_decl_overload' into doing the right thing. */
2101 TREE_CHAIN (last) = void_list_node;
2102 method_name = build_decl_overload (name, parmtypes,
2103 1 + (name == constructor_name (save_basetype)
2104 || name == constructor_name_full (save_basetype)));
2105 TREE_CHAIN (last) = NULL_TREE;
2106 #endif
2107
2108 for (pass = 0; pass < 2; pass++)
2109 {
2110 struct candidate *candidates;
2111 struct candidate *cp;
2112 int len;
2113 unsigned best = 1;
2114
2115 /* This increments every time we go up the type hierarchy.
2116 The idea is to prefer a function of the derived class if possible. */
2117 int b_or_d = 0;
2118
2119 baselink = result;
2120
2121 if (pass > 0)
2122 {
2123 candidates
2124 = (struct candidate *) alloca ((ever_seen+1)
2125 * sizeof (struct candidate));
2126 bzero ((char *) candidates, (ever_seen + 1) * sizeof (struct candidate));
2127 cp = candidates;
2128 len = list_length (parms);
2129 ever_seen = 0;
2130
2131 /* First see if a global function has a shot at it. */
2132 if (flags & LOOKUP_GLOBAL)
2133 {
2134 tree friend_parms;
2135 tree parm = instance_ptr;
2136
2137 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE)
2138 parm = convert_from_reference (parm);
2139 else if (TREE_CODE (TREE_TYPE (parm)) == POINTER_TYPE)
2140 parm = build_indirect_ref (parm, "friendifying parms (compiler error)");
2141 else
2142 my_friendly_abort (167);
2143
2144 friend_parms = tree_cons (NULL_TREE, parm, TREE_CHAIN (parms));
2145
2146 cp->h_len = len;
2147 cp->harshness = (struct harshness_code *)
2148 alloca ((len + 1) * sizeof (struct harshness_code));
2149
2150 result = build_overload_call (name, friend_parms, 0, cp);
2151 /* If it turns out to be the one we were actually looking for
2152                  (it was probably a friend function), then return the
2153 good result. */
2154 if (TREE_CODE (result) == CALL_EXPR)
2155 return result;
2156
2157 while ((cp->h.code & EVIL_CODE) == 0)
2158 {
2159 /* non-standard uses: set the field to 0 to indicate
2160 we are using a non-member function. */
2161 cp->u.field = 0;
2162 if (cp->harshness[len].distance == 0
2163 && cp->h.code < best)
2164 best = cp->h.code;
2165 cp += 1;
2166 }
2167 }
2168 }
2169
2170 while (baselink)
2171 {
2172 /* We have a hit (of sorts). If the parameter list is
2173 "error_mark_node", or some variant thereof, it won't
2174             match any methods.  Since we have verified that there is
2175 some method vaguely matching this one (in name at least),
2176 silently return.
2177
2178 Don't stop for friends, however. */
2179 basetype_path = TREE_PURPOSE (baselink);
2180
2181 function = TREE_VALUE (baselink);
2182 if (TREE_CODE (basetype_path) == TREE_LIST)
2183 basetype_path = TREE_VALUE (basetype_path);
2184 basetype = BINFO_TYPE (basetype_path);
2185
2186 #if 0
2187 /* Cast the instance variable if necessary. */
2188 if (basetype != TYPE_MAIN_VARIANT
2189 (TREE_TYPE (TREE_TYPE (TREE_VALUE (parms)))))
2190 {
2191 if (basetype == save_basetype)
2192 TREE_VALUE (parms) = instance_ptr;
2193 else
2194 {
2195 tree type = build_pointer_type
2196 (build_type_variant (basetype, constp, volatilep));
2197 TREE_VALUE (parms) = convert_force (type, instance_ptr, 0);
2198 }
2199 }
2200
2201 /* FIXME: this is the wrong place to get an error. Hopefully
2202 the access-control rewrite will make this change more cleanly. */
2203 if (TREE_VALUE (parms) == error_mark_node)
2204 return error_mark_node;
2205 #endif
2206
2207 if (DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (function)))
2208 function = DECL_CHAIN (function);
2209
2210 for (; function; function = DECL_CHAIN (function))
2211 {
2212 #ifdef GATHER_STATISTICS
2213 n_inner_fields_searched++;
2214 #endif
2215 ever_seen++;
2216 if (pass > 0)
2217 found_fns = tree_cons (NULL_TREE, function, found_fns);
2218
2219 /* Not looking for friends here. */
2220 if (TREE_CODE (TREE_TYPE (function)) == FUNCTION_TYPE
2221 && ! DECL_STATIC_FUNCTION_P (function))
2222 continue;
2223
2224 #if 0
2225 if (pass == 0
2226 && DECL_ASSEMBLER_NAME (function) == method_name)
2227 goto found;
2228 #endif
2229
2230 if (pass > 0)
2231 {
2232 tree these_parms = parms;
2233
2234 #ifdef GATHER_STATISTICS
2235 n_inner_fields_searched++;
2236 #endif
2237 cp->h_len = len;
2238 cp->harshness = (struct harshness_code *)
2239 alloca ((len + 1) * sizeof (struct harshness_code));
2240
2241 if (DECL_STATIC_FUNCTION_P (function))
2242 these_parms = TREE_CHAIN (these_parms);
2243 compute_conversion_costs (function, these_parms, cp, len);
2244
2245 if ((cp->h.code & EVIL_CODE) == 0)
2246 {
2247 cp->u.field = function;
2248 cp->function = function;
2249 cp->basetypes = basetype_path;
2250
2251 /* Don't allow non-converting constructors to convert. */
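                      /* (For illustration: DECL_NONCONVERTING_P is assumed here
                         to mark constructors the user declared `explicit'; such
                         a constructor must not be picked when this lookup is
                         only performing an implicit conversion, e.g. a
                         hypothetical `struct X { explicit X (int); }; X x = 5;'.)  */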
2252 if (flags & LOOKUP_ONLYCONVERTING
2253 && DECL_LANG_SPECIFIC (function)
2254 && DECL_NONCONVERTING_P (function))
2255 continue;
2256
2257 /* No "two-level" conversions. */
2258 if (flags & LOOKUP_NO_CONVERSION
2259 && (cp->h.code & USER_CODE))
2260 continue;
2261
2262 cp++;
2263 }
2264 }
2265 }
2266 /* Now we have run through one link's member functions.
2267             Arrange to head-insert this link's links.  */
2268 baselink = next_baselink (baselink);
2269 b_or_d += 1;
2270          /* Don't grab functions from base classes.  `lookup_fnfields' will
2271             do the work to get us down into the right place.  */
2272 baselink = NULL_TREE;
2273 }
2274 if (pass == 0)
2275 {
2276 tree igv = lookup_name_nonclass (name);
2277
2278          /* No exact match could be found.  Now try to find a match
2279 using default conversions. */
2280 if ((flags & LOOKUP_GLOBAL) && igv)
2281 {
2282 if (TREE_CODE (igv) == FUNCTION_DECL)
2283 ever_seen += 1;
2284 else if (TREE_CODE (igv) == TREE_LIST)
2285 ever_seen += count_functions (igv);
2286 }
2287
2288 if (ever_seen == 0)
2289 {
2290 if ((flags & (LOOKUP_SPECULATIVELY|LOOKUP_COMPLAIN))
2291 == LOOKUP_SPECULATIVELY)
2292 return NULL_TREE;
2293
2294 TREE_CHAIN (last) = void_list_node;
2295 if (flags & LOOKUP_GLOBAL)
2296 cp_error ("no global or member function `%D(%A)' defined",
2297 name, parmtypes);
2298 else
2299 cp_error ("no member function `%T::%D(%A)' defined",
2300 save_basetype, name, TREE_CHAIN (parmtypes));
2301 return error_mark_node;
2302 }
2303 continue;
2304 }
2305
2306 if (cp - candidates != 0)
2307 {
2308          /* Rank from worst to best.  Then cp will point to the best one.
2309 Private fields have their bits flipped. For unsigned
2310 numbers, this should make them look very large.
2311 If the best alternate has a (signed) negative value,
2312 then all we ever saw were private members. */
2313 if (cp - candidates > 1)
2314 {
2315 int n_candidates = cp - candidates;
2316 extern int warn_synth;
2317 TREE_VALUE (parms) = instance_ptr;
2318 cp = ideal_candidate (candidates, n_candidates, len);
2319 if (cp == (struct candidate *)0)
2320 {
2321 if (flags & LOOKUP_COMPLAIN)
2322 {
2323 TREE_CHAIN (last) = void_list_node;
2324 cp_error ("call of overloaded %s `%D(%A)' is ambiguous",
2325 name_kind, name, TREE_CHAIN (parmtypes));
2326 print_n_candidates (candidates, n_candidates);
2327 }
2328 return error_mark_node;
2329 }
2330 if (cp->h.code & EVIL_CODE)
2331 return error_mark_node;
2332 if (warn_synth
2333 && DECL_NAME (cp->function) == ansi_opname[MODIFY_EXPR]
2334 && DECL_ARTIFICIAL (cp->function)
2335 && n_candidates == 2)
2336 {
2337 cp_warning ("using synthesized `%#D' for copy assignment",
2338 cp->function);
2339 cp_warning_at (" where cfront would use `%#D'",
2340 candidates->function);
2341 }
2342 }
2343 else if (cp[-1].h.code & EVIL_CODE)
2344 {
2345 if (flags & LOOKUP_COMPLAIN)
2346 cp_error ("ambiguous type conversion requested for %s `%D'",
2347 name_kind, name);
2348 return error_mark_node;
2349 }
2350 else
2351 cp--;
2352
2353 /* The global function was the best, so use it. */
2354 if (cp->u.field == 0)
2355 {
2356 /* We must convert the instance pointer into a reference type.
2357                 Global overloaded functions can take only
2358 aggregate objects (which come for free from references)
2359 or reference data types anyway. */
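                  /* (For illustration: this is the case where a non-member
                     function found via LOOKUP_GLOBAL -- typically an overloaded
                     operator -- beat every member candidate; it expects the
                     object itself rather than a `this' pointer, so the instance
                     argument is retyped as a reference below.)  */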
2360 TREE_VALUE (parms) = copy_node (instance_ptr);
2361 TREE_TYPE (TREE_VALUE (parms)) = build_reference_type (TREE_TYPE (TREE_TYPE (instance_ptr)));
2362 return build_function_call (cp->function, parms);
2363 }
2364
2365 function = cp->function;
2366 basetype_path = cp->basetypes;
2367 if (! DECL_STATIC_FUNCTION_P (function))
2368 TREE_VALUE (parms) = cp->arg;
2369 goto found_and_maybe_warn;
2370 }
2371
2372 if (flags & (LOOKUP_COMPLAIN|LOOKUP_SPECULATIVELY))
2373 {
2374 if ((flags & (LOOKUP_SPECULATIVELY|LOOKUP_COMPLAIN))
2375 == LOOKUP_SPECULATIVELY)
2376 return NULL_TREE;
2377
2378 if (DECL_STATIC_FUNCTION_P (cp->function))
2379 parms = TREE_CHAIN (parms);
2380 if (ever_seen)
2381 {
2382 if (flags & LOOKUP_SPECULATIVELY)
2383 return NULL_TREE;
2384 if (static_call_context
2385 && TREE_CODE (TREE_TYPE (cp->function)) == METHOD_TYPE)
2386 cp_error ("object missing in call to `%D'", cp->function);
2387 else if (ever_seen > 1)
2388 {
2389 TREE_CHAIN (last) = void_list_node;
2390 cp_error ("no matching function for call to `%T::%D (%A)%V'",
2391 TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (instance_ptr))),
2392 name, TREE_CHAIN (parmtypes),
2393 TREE_TYPE (TREE_TYPE (instance_ptr)));
2394 TREE_CHAIN (last) = NULL_TREE;
2395 print_candidates (found_fns);
2396 }
2397 else
2398 report_type_mismatch (cp, parms, name_kind);
2399 return error_mark_node;
2400 }
2401
2402 if ((flags & (LOOKUP_SPECULATIVELY|LOOKUP_COMPLAIN))
2403 == LOOKUP_COMPLAIN)
2404 {
2405 cp_error ("%T has no method named %D", save_basetype, name);
2406 return error_mark_node;
2407 }
2408 return NULL_TREE;
2409 }
2410 continue;
2411
2412 found_and_maybe_warn:
2413 if ((cp->harshness[0].code & CONST_CODE)
2414 /* 12.1p2: Constructors can be called for const objects. */
2415 && ! DECL_CONSTRUCTOR_P (cp->function))
2416 {
2417 if (flags & LOOKUP_COMPLAIN)
2418 {
2419 cp_error_at ("non-const member function `%D'", cp->function);
2420 error ("called for const object at this point in file");
2421 }
2422 /* Not good enough for a match. */
2423 else
2424 return error_mark_node;
2425 }
2426 goto found;
2427 }
2428 /* Silently return error_mark_node. */
2429 return error_mark_node;
2430
2431 found:
2432 if (flags & LOOKUP_PROTECT)
2433 access = compute_access (basetype_path, function);
2434
2435 if (access == access_private_node)
2436 {
2437 if (flags & LOOKUP_COMPLAIN)
2438 {
2439 cp_error_at ("%s `%+#D' is %s", name_kind, function,
2440 TREE_PRIVATE (function) ? "private"
2441 : "from private base class");
2442 error ("within this context");
2443 }
2444 return error_mark_node;
2445 }
2446 else if (access == access_protected_node)
2447 {
2448 if (flags & LOOKUP_COMPLAIN)
2449 {
2450 cp_error_at ("%s `%+#D' %s", name_kind, function,
2451 TREE_PROTECTED (function) ? "is protected"
2452 : "has protected accessibility");
2453 error ("within this context");
2454 }
2455 return error_mark_node;
2456 }
2457
2458 /* From here on down, BASETYPE is the type that INSTANCE_PTR's
2459 type (if it exists) is a pointer to. */
2460
2461 if (DECL_ABSTRACT_VIRTUAL_P (function)
2462 && instance == C_C_D
2463 && DECL_CONSTRUCTOR_P (current_function_decl)
2464 && ! (flags & LOOKUP_NONVIRTUAL)
2465 && value_member (function, get_abstract_virtuals (basetype)))
2466 cp_error ("abstract virtual `%#D' called from constructor", function);
2467
2468 if (IS_SIGNATURE (basetype) && static_call_context)
2469 {
2470 cp_error ("cannot call signature member function `%T::%D' without signature pointer/reference",
2471 basetype, name);
2472 return error_mark_node;
2473 }
2474 else if (IS_SIGNATURE (basetype))
2475 return build_signature_method_call (basetype, instance, function, parms);
2476
2477 function = DECL_MAIN_VARIANT (function);
2478 mark_used (function);
2479
2480 /* Is it a synthesized method that needs to be synthesized? */
2481 if (DECL_ARTIFICIAL (function) && ! DECL_INITIAL (function)
2482 /* Kludge: don't synthesize for default args. */
2483 && current_function_decl)
2484 synthesize_method (function);
2485
2486 if (pedantic && DECL_THIS_INLINE (function) && ! DECL_ARTIFICIAL (function)
2487 && ! DECL_INITIAL (function) && ! DECL_PENDING_INLINE_INFO (function))
2488 cp_warning ("inline function `%#D' called before definition", function);
2489
2490 fntype = TREE_TYPE (function);
2491 if (TREE_CODE (fntype) == POINTER_TYPE)
2492 fntype = TREE_TYPE (fntype);
2493 basetype = DECL_CLASS_CONTEXT (function);
2494
2495 /* If we are referencing a virtual function from an object
2496 of effectively static type, then there is no need
2497 to go through the virtual function table. */
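      /* For example (with a hypothetical class `Derived'): in
         `Derived d; d.vf ();' the dynamic type of `d' is known, so when
         `resolves_to_fixed_type_p' succeeds we call Derived::vf directly
         rather than through the virtual function table.  */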
2498 if (need_vtbl == maybe_needed)
2499 {
2500 int fixed_type = resolves_to_fixed_type_p (instance, 0);
2501
2502 if (all_virtual == 1
2503 && DECL_VINDEX (function)
2504 && may_be_remote (basetype))
2505 need_vtbl = needed;
2506 else if (DECL_VINDEX (function))
2507 need_vtbl = fixed_type ? unneeded : needed;
2508 else
2509 need_vtbl = not_needed;
2510 }
2511
2512 if (TREE_CODE (fntype) == METHOD_TYPE && static_call_context
2513 && !DECL_CONSTRUCTOR_P (function))
2514 {
2515 /* Let's be nice to the user for now, and give reasonable
2516 default behavior. */
2517 instance_ptr = current_class_decl;
2518 if (instance_ptr)
2519 {
2520 if (basetype != current_class_type)
2521 {
2522 tree binfo = get_binfo (basetype, current_class_type, 1);
2523 if (binfo == NULL_TREE)
2524 {
2525 error_not_base_type (function, current_class_type);
2526 return error_mark_node;
2527 }
2528 else if (basetype == error_mark_node)
2529 return error_mark_node;
2530 }
2531 }
2532 /* Only allow a static member function to call another static member
2533 function. */
2534 else if (DECL_LANG_SPECIFIC (function)
2535 && !DECL_STATIC_FUNCTION_P (function))
2536 {
2537 cp_error ("cannot call member function `%D' without object",
2538 function);
2539 return error_mark_node;
2540 }
2541 }
2542
2543 value_type = TREE_TYPE (fntype) ? TREE_TYPE (fntype) : void_type_node;
2544
2545 if (TYPE_SIZE (complete_type (value_type)) == 0)
2546 {
2547 if (flags & LOOKUP_COMPLAIN)
2548 incomplete_type_error (0, value_type);
2549 return error_mark_node;
2550 }
2551
2552 if (DECL_STATIC_FUNCTION_P (function))
2553 parms = convert_arguments (NULL_TREE, TYPE_ARG_TYPES (fntype),
2554 TREE_CHAIN (parms), function, LOOKUP_NORMAL);
2555 else if (need_vtbl == unneeded)
2556 {
2557 int sub_flags = DECL_CONSTRUCTOR_P (function) ? flags : LOOKUP_NORMAL;
2558 basetype = TREE_TYPE (instance);
2559 if (TYPE_METHOD_BASETYPE (TREE_TYPE (function)) != TYPE_MAIN_VARIANT (basetype)
2560 && TYPE_USES_COMPLEX_INHERITANCE (basetype))
2561 {
2562 basetype = DECL_CLASS_CONTEXT (function);
2563 instance_ptr = convert_pointer_to (basetype, instance_ptr);
2564 instance = build_indirect_ref (instance_ptr, NULL_PTR);
2565 }
2566 parms = tree_cons (NULL_TREE, instance_ptr,
2567 convert_arguments (NULL_TREE, TREE_CHAIN (TYPE_ARG_TYPES (fntype)), TREE_CHAIN (parms), function, sub_flags));
2568 }
2569 else
2570 {
2571 if ((flags & LOOKUP_NONVIRTUAL) == 0)
2572 basetype = DECL_CONTEXT (function);
2573
2574 /* First parm could be integer_zerop with casts like
2575 ((Object*)0)->Object::IsA() */
2576 if (!integer_zerop (TREE_VALUE (parms)))
2577 {
2578 /* Since we can't have inheritance with a union, doing get_binfo
2579 on it won't work. We do all the convert_pointer_to_real
2580 stuff to handle MI correctly...for unions, that's not
2581 an issue, so we must short-circuit that extra work here. */
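                  /* (E.g., for a hypothetical `union U { int i; void f (); };'
                     there can be no base classes, so the instance pointer is
                     already of the right type and get_binfo would be wasted
                     work.)  */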
2582 tree tmp = TREE_TYPE (TREE_TYPE (TREE_VALUE (parms)));
2583 if (tmp != NULL_TREE && TREE_CODE (tmp) == UNION_TYPE)
2584 instance_ptr = TREE_VALUE (parms);
2585 else
2586 {
2587 tree binfo = get_binfo (basetype,
2588 TREE_TYPE (TREE_TYPE (TREE_VALUE (parms))),
2589 0);
2590 instance_ptr = convert_pointer_to_real (binfo, TREE_VALUE (parms));
2591 }
2592 instance_ptr
2593 = convert_pointer_to (build_type_variant (basetype,
2594 constp, volatilep),
2595 instance_ptr);
2596
2597 if (TREE_CODE (instance_ptr) == COND_EXPR)
2598 {
2599 instance_ptr = save_expr (instance_ptr);
2600 instance = build_indirect_ref (instance_ptr, NULL_PTR);
2601 }
2602 else if (TREE_CODE (instance_ptr) == NOP_EXPR
2603 && TREE_CODE (TREE_OPERAND (instance_ptr, 0)) == ADDR_EXPR
2604 && TREE_OPERAND (TREE_OPERAND (instance_ptr, 0), 0) == instance)
2605 ;
2606 /* The call to `convert_pointer_to' may return error_mark_node. */
2607 else if (TREE_CODE (instance_ptr) == ERROR_MARK)
2608 return instance_ptr;
2609 else if (instance == NULL_TREE
2610 || TREE_CODE (instance) != INDIRECT_REF
2611 || TREE_OPERAND (instance, 0) != instance_ptr)
2612 instance = build_indirect_ref (instance_ptr, NULL_PTR);
2613 }
2614 parms = tree_cons (NULL_TREE, instance_ptr,
2615 convert_arguments (NULL_TREE, TREE_CHAIN (TYPE_ARG_TYPES (fntype)), TREE_CHAIN (parms), function, LOOKUP_NORMAL));
2616 }
2617
2618 #if 0
2619 /* Constructors do not overload method calls. */
2620 else if (TYPE_OVERLOADS_METHOD_CALL_EXPR (basetype)
2621 && name != TYPE_IDENTIFIER (basetype)
2622 && (TREE_CODE (function) != FUNCTION_DECL
2623 || strncmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function)),
2624 OPERATOR_METHOD_FORMAT,
2625 OPERATOR_METHOD_LENGTH))
2626 && (may_be_remote (basetype) || instance != C_C_D))
2627 {
2628 tree fn_as_int;
2629
2630 parms = TREE_CHAIN (parms);
2631
2632 if (!all_virtual && TREE_CODE (function) == FUNCTION_DECL)
2633 fn_as_int = build_unary_op (ADDR_EXPR, function, 0);
2634 else
2635 fn_as_int = convert (TREE_TYPE (default_conversion (function)), DECL_VINDEX (function));
2636 if (all_virtual == 1)
2637 fn_as_int = convert (integer_type_node, fn_as_int);
2638
2639 result = build_opfncall (METHOD_CALL_EXPR, LOOKUP_NORMAL, instance, fn_as_int, parms);
2640
2641 if (result == NULL_TREE)
2642 {
2643 compiler_error ("could not overload `operator->()(...)'");
2644 return error_mark_node;
2645 }
2646 else if (result == error_mark_node)
2647 return error_mark_node;
2648
2649 #if 0
2650 /* Do this if we want the result of operator->() to inherit
2651 the type of the function it is subbing for. */
2652 TREE_TYPE (result) = value_type;
2653 #endif
2654
2655 return result;
2656 }
2657 #endif
2658
2659 if (parms == error_mark_node
2660 || (parms && TREE_CHAIN (parms) == error_mark_node))
2661 return error_mark_node;
2662
2663 if (need_vtbl == needed)
2664 {
2665 function = build_vfn_ref (&TREE_VALUE (parms), instance,
2666 DECL_VINDEX (function));
2667 TREE_TYPE (function) = build_pointer_type (fntype);
2668 }
2669
2670 if (TREE_CODE (function) == FUNCTION_DECL)
2671 GNU_xref_call (current_function_decl,
2672 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (function)));
2673
2674 {
2675 int is_constructor;
2676
2677 if (TREE_CODE (function) == FUNCTION_DECL)
2678 {
2679 is_constructor = DECL_CONSTRUCTOR_P (function);
2680 function = default_conversion (function);
2681 }
2682 else
2683 {
2684 is_constructor = 0;
2685 function = default_conversion (function);
2686 }
2687
2688 result = build_nt (CALL_EXPR, function, parms, NULL_TREE);
2689
2690 TREE_TYPE (result) = value_type;
2691 TREE_SIDE_EFFECTS (result) = 1;
2692 TREE_HAS_CONSTRUCTOR (result) = is_constructor;
2693 result = convert_from_reference (result);
2694 return result;
2695 }
2696 }
2697
2698 /* Similar to `build_method_call', but for overloaded non-member functions.
2699    The name of the function to call comes through FNNAME.  Which
2700    declaration is chosen depends on PARMS.
2701
2702 Note that this function must handle simple `C' promotions,
2703 as well as variable numbers of arguments (...), and
2704 default arguments to boot.
2705
2706 If the overloading is successful, we return a tree node which
2707 contains the call to the function.
2708
2709 If overloading produces candidates which are probable, but not definite,
2710 we hold these candidates. If FINAL_CP is non-zero, then we are free
2711 to assume that final_cp points to enough storage for all candidates that
2712 this function might generate. The `harshness' array is preallocated for
2713 the first candidate, but not for subsequent ones.
2714
2715 Note that the DECL_RTL of FUNCTION must be made to agree with this
2716 function's new name. */
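
   /* (Illustrative example: given hypothetical declarations `int f (int);'
      and `double f (double);', a call `f (2)' arrives here with FNNAME the
      identifier for `f' and PARMS holding the single argument; each visible
      declaration gets a candidate scored by `compute_conversion_costs', and
      `ideal_candidate' picks the winner when more than one survives.)  */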
2717
2718 tree
2719 build_overload_call_real (fnname, parms, flags, final_cp, buildxxx)
2720 tree fnname, parms;
2721 int flags;
2722 struct candidate *final_cp;
2723 int buildxxx;
2724 {
2725 /* must check for overloading here */
2726 tree functions, function, parm;
2727 tree parmtypes = NULL_TREE, last = NULL_TREE;
2728 register tree outer;
2729 int length;
2730 int parmlength = list_length (parms);
2731
2732 struct candidate *candidates, *cp;
2733
2734 if (final_cp)
2735 {
2736 final_cp[0].h.code = 0;
2737 final_cp[0].h.distance = 0;
2738 final_cp[0].function = 0;
2739 /* end marker. */
2740 final_cp[1].h.code = EVIL_CODE;
2741 }
2742
2743 for (parm = parms; parm; parm = TREE_CHAIN (parm))
2744 {
2745 register tree t = TREE_TYPE (TREE_VALUE (parm));
2746
2747 if (t == error_mark_node)
2748 {
2749 if (final_cp)
2750 final_cp->h.code = EVIL_CODE;
2751 return error_mark_node;
2752 }
2753 if (TREE_CODE (t) == OFFSET_TYPE)
2754 #if 0
2755 /* This breaks reference-to-array parameters. */
2756 || TREE_CODE (t) == ARRAY_TYPE
2757 #endif
2758 {
2759 /* Perform the conversion from ARRAY_TYPE to POINTER_TYPE in place.
2760 Also convert OFFSET_TYPE entities to their normal selves.
2761 This eliminates needless calls to `compute_conversion_costs'. */
2762 TREE_VALUE (parm) = default_conversion (TREE_VALUE (parm));
2763 t = TREE_TYPE (TREE_VALUE (parm));
2764 }
2765 last = build_tree_list (NULL_TREE, t);
2766 parmtypes = chainon (parmtypes, last);
2767 }
2768 if (last)
2769 TREE_CHAIN (last) = void_list_node;
2770 else
2771 parmtypes = void_list_node;
2772
2773 if (is_overloaded_fn (fnname))
2774 {
2775 functions = fnname;
2776 if (TREE_CODE (fnname) == TREE_LIST)
2777 fnname = TREE_PURPOSE (functions);
2778 else if (TREE_CODE (fnname) == FUNCTION_DECL)
2779 fnname = DECL_NAME (functions);
2780 }
2781 else
2782 functions = lookup_name_nonclass (fnname);
2783
2784 if (functions == NULL_TREE)
2785 {
2786 if (flags & LOOKUP_SPECULATIVELY)
2787 return NULL_TREE;
2788 if (flags & LOOKUP_COMPLAIN)
2789 error ("only member functions apply");
2790 if (final_cp)
2791 final_cp->h.code = EVIL_CODE;
2792 return error_mark_node;
2793 }
2794
2795 if (TREE_CODE (functions) == FUNCTION_DECL && ! IDENTIFIER_OPNAME_P (fnname))
2796 {
2797 functions = DECL_MAIN_VARIANT (functions);
2798 if (final_cp)
2799 {
2800 /* We are just curious whether this is a viable alternative or
2801 not. */
2802 compute_conversion_costs (functions, parms, final_cp, parmlength);
2803 return functions;
2804 }
2805 else
2806 return build_function_call_real (functions, parms, 1, flags);
2807 }
2808
2809 if (TREE_CODE (functions) == TREE_LIST
2810 && TREE_VALUE (functions) == NULL_TREE)
2811 {
2812 if (flags & LOOKUP_SPECULATIVELY)
2813 return NULL_TREE;
2814
2815 if (flags & LOOKUP_COMPLAIN)
2816 cp_error ("function `%D' declared overloaded, but no instances of that function declared",
2817 TREE_PURPOSE (functions));
2818 if (final_cp)
2819 final_cp->h.code = EVIL_CODE;
2820 return error_mark_node;
2821 }
2822
2823 length = count_functions (functions);
2824
2825 if (final_cp)
2826 candidates = final_cp;
2827 else
2828 {
2829 candidates
2830 = (struct candidate *)alloca ((length+1) * sizeof (struct candidate));
2831 bzero ((char *) candidates, (length + 1) * sizeof (struct candidate));
2832 }
2833
2834 cp = candidates;
2835
2836 my_friendly_assert (is_overloaded_fn (functions), 169);
2837
2838 functions = get_first_fn (functions);
2839
2840 /* OUTER is the list of FUNCTION_DECLS, in a TREE_LIST. */
2841 for (outer = functions; outer; outer = DECL_CHAIN (outer))
2842 {
2843 int template_cost = 0;
2844 function = outer;
2845 if (TREE_CODE (function) != FUNCTION_DECL
2846 && ! (TREE_CODE (function) == TEMPLATE_DECL
2847 && TREE_CODE (DECL_TEMPLATE_RESULT (function)) == FUNCTION_DECL))
2848 {
2849 enum tree_code code = TREE_CODE (function);
2850 if (code == TEMPLATE_DECL)
2851 code = TREE_CODE (DECL_TEMPLATE_RESULT (function));
2852 if (code == CONST_DECL)
2853 cp_error_at
2854 ("enumeral value `%D' conflicts with function of same name",
2855 function);
2856 else if (code == VAR_DECL)
2857 {
2858 if (TREE_STATIC (function))
2859 cp_error_at
2860 ("variable `%D' conflicts with function of same name",
2861 function);
2862 else
2863 cp_error_at
2864 ("constant field `%D' conflicts with function of same name",
2865 function);
2866 }
2867 else if (code == TYPE_DECL)
2868 continue;
2869 else
2870 my_friendly_abort (2);
2871 error ("at this point in file");
2872 continue;
2873 }
2874 if (TREE_CODE (function) == TEMPLATE_DECL)
2875 {
2876 int ntparms = TREE_VEC_LENGTH (DECL_TEMPLATE_PARMS (function));
2877 tree *targs = (tree *) alloca (sizeof (tree) * ntparms);
2878 int i;
2879
2880 i = type_unification (DECL_TEMPLATE_PARMS (function), targs,
2881 TYPE_ARG_TYPES (TREE_TYPE (function)),
2882 parms, &template_cost, 0);
2883 if (i == 0)
2884 {
2885 function = instantiate_template (function, targs);
2886 if (function == error_mark_node)
2887 return function;
2888 }
2889 }
2890
2891 if (TREE_CODE (function) == TEMPLATE_DECL)
2892 {
2893 /* Unconverted template -- failed match. */
2894 cp->function = function;
2895 cp->u.bad_arg = -4;
2896 cp->h.code = EVIL_CODE;
2897 }
2898 else
2899 {
2900 struct candidate *cp2;
2901
2902 /* Check that this decl is not the same as a function that's in
2903 the list due to some template instantiation. */
2904 cp2 = candidates;
2905 while (cp2 != cp)
2906 if (cp2->function == function)
2907 break;
2908 else
2909 cp2 += 1;
2910 if (cp2->function == function)
2911 continue;
2912
2913 function = DECL_MAIN_VARIANT (function);
2914
2915 /* Can't use alloca here, since result might be
2916 passed to calling function. */
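          /* (That is: when FINAL_CP was supplied by the caller, the candidate
             records -- and so these harshness vectors -- presumably have to
             outlive this frame, hence `oballoc' rather than `alloca'.)  */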
2917 cp->h_len = parmlength;
2918 cp->harshness = (struct harshness_code *)
2919 oballoc ((parmlength + 1) * sizeof (struct harshness_code));
2920
2921 compute_conversion_costs (function, parms, cp, parmlength);
2922
2923 /* Make sure this is clear as well. */
2924 cp->h.int_penalty += template_cost;
2925
2926 if ((cp[0].h.code & EVIL_CODE) == 0)
2927 {
2928 cp[1].h.code = EVIL_CODE;
2929 cp++;
2930 }
2931 }
2932 }
2933
2934 if (cp - candidates)
2935 {
2936 tree rval = error_mark_node;
2937
2938 /* Leave marker. */
2939 cp[0].h.code = EVIL_CODE;
2940 if (cp - candidates > 1)
2941 {
2942 struct candidate *best_cp
2943 = ideal_candidate (candidates, cp - candidates, parmlength);
2944 if (best_cp == (struct candidate *)0)
2945 {
2946 if (flags & LOOKUP_COMPLAIN)
2947 {
2948 cp_error ("call of overloaded `%D' is ambiguous", fnname);
2949 print_n_candidates (candidates, cp - candidates);
2950 }
2951 return error_mark_node;
2952 }
2953 else
2954 rval = best_cp->function;
2955 }
2956 else
2957 {
2958 cp -= 1;
2959 if (cp->h.code & EVIL_CODE)
2960 {
2961 if (flags & LOOKUP_COMPLAIN)
2962 error ("type conversion ambiguous");
2963 }
2964 else
2965 rval = cp->function;
2966 }
2967
2968 if (final_cp)
2969 return rval;
2970
2971 return buildxxx ? build_function_call_real (rval, parms, 0, flags)
2972 : build_function_call_real (rval, parms, 1, flags);
2973 }
2974
2975 if (flags & LOOKUP_SPECULATIVELY)
2976 return NULL_TREE;
2977
2978 if (flags & LOOKUP_COMPLAIN)
2979 report_type_mismatch (cp, parms, "function",
2980 decl_as_string (cp->function, 1));
2981
2982 return error_mark_node;
2983 }
2984
2985 tree
2986 build_overload_call (fnname, parms, flags, final_cp)
2987 tree fnname, parms;
2988 int flags;
2989 struct candidate *final_cp;
2990 {
2991 return build_overload_call_real (fnname, parms, flags, final_cp, 0);
2992 }
2993
2994 tree
2995 build_overload_call_maybe (fnname, parms, flags, final_cp)
2996 tree fnname, parms;
2997 int flags;
2998 struct candidate *final_cp;
2999 {
3000 return build_overload_call_real (fnname, parms, flags, final_cp, 1);
3001 }