* cp-tree.h (OVL_CHAIN): Check looking at OVERLOAD.
(ovl_iterator): Add allow_inner field. Adjust ctor. Make
unduplicatable.
(ovl_iterator::maybe_push, ovl_iterator::pop): New.
(lkp_iterator): Add outer field. Adjust ctor.
(lkp_iterator::operator++): New.
(lookup_mark, lookup_maybe_add): Declare.
* name-lookup.c (name_lookup): Delete fn_set member.
(name_lookup::preserve_state, name_lookup::restore_state): Unmark
and mark lookup.
(name_lookup::add_value): Use lookup_add directly.
(name_lookup::add_fns): Use lookup_maybe_add.
(name_lookup::search_adl): Mark and unmark fns.
(pushdecl): Adjust.
* pt.c (check_explicit_specialization): Use lookup_add directly.
* ptree.c (cxx_print_xnode): Show complete overload structure.
* tree.c (lookup_mark, lookup_maybe_add): New.
From-SVN: r248517
2017-05-26 Nathan Sidwell <nathan@acm.org>
+ * cp-tree.h (OVL_CHAIN): Check looking at OVERLOAD.
+ (ovl_iterator): Add allow_inner field. Adjust ctor. Make
+ unduplicatable.
+ (ovl_iterator::maybe_push, ovl_iterator::pop): New.
+ (lkp_iterator): Add outer field. Adjust ctor.
+ (lkp_iterator::operator++): New.
+ (lookup_mark, lookup_maybe_add): Declare.
+ * name-lookup.c (name_lookup): Delete fn_set member.
+ (name_lookup::preserve_state, name_lookup::restore_state): Unmark
+ and mark lookup.
+ (name_lookup::add_value): Use lookup_add directly.
+ (name_lookup::add_fns): Use lookup_maybe_add.
+ (name_lookup::search_adl): Mark and unmark fns.
+ (pushdecl): Adjust.
+ * pt.c (check_explicit_specialization): Use lookup_add directly.
+ * ptree.c (cxx_print_xnode): Show complete overload structure.
+ * tree.c (lookup_mark, lookup_maybe_add): New.
+
* name-lookup.c (name_lookup::search_adl): ADL OMP UDR type args.
2017-05-26 Jakub Jelinek <jakub@redhat.com>
Other users should use iterators and convenience functions. */
#define OVL_FUNCTION(NODE) \
(((struct tree_overload*)OVERLOAD_CHECK (NODE))->function)
-#define OVL_CHAIN(NODE) TREE_CHAIN (NODE)
+#define OVL_CHAIN(NODE) \
+ (((struct tree_overload*)OVERLOAD_CHECK (NODE))->common.chain)
/* If set, this was imported in a using declaration. */
#define OVL_USING_P(NODE) TREE_LANG_FLAG_1 (OVERLOAD_CHECK (NODE))
#define OVL_SINGLE_P(NODE) \
(TREE_CODE (NODE) != OVERLOAD || !OVL_CHAIN (NODE))
+/* OVL_HIDDEN_P nodes come first, then OVL_USING_P nodes, then regular
+ fns. */
+
struct GTY(()) tree_overload {
struct tree_common common;
tree function;
class ovl_iterator
{
tree ovl;
+ const bool allow_inner; /* Only used when checking. */
public:
- ovl_iterator (tree o)
- :ovl (o)
- {}
-
- ovl_iterator &operator= (const ovl_iterator &from)
+ explicit ovl_iterator (tree o, bool allow = false)
+ : ovl (o), allow_inner (allow)
{
- ovl = from.ovl;
-
- return *this;
}
+ private:
+ /* Do not duplicate. */
+ ovl_iterator &operator= (const ovl_iterator &);
+ ovl_iterator (const ovl_iterator &);
+
public:
operator bool () const
{
tree fn = TREE_CODE (ovl) != OVERLOAD ? ovl : OVL_FUNCTION (ovl);
/* Check this is not an unexpected 2-dimensional overload. */
- gcc_checking_assert (TREE_CODE (fn) != OVERLOAD);
+ gcc_checking_assert (allow_inner || TREE_CODE (fn) != OVERLOAD);
return fn;
}
return reveal_node (head, ovl);
}
+ protected:
+ /* If we have a nested overload, point at the inner overload and
+ return the next link on the outer one. */
+ tree maybe_push ()
+ {
+ tree r = NULL_TREE;
+
+ if (ovl && TREE_CODE (ovl) == OVERLOAD && OVL_NESTED_P (ovl))
+ {
+ r = OVL_CHAIN (ovl);
+ ovl = OVL_FUNCTION (ovl);
+ }
+ return r;
+ }
+ /* Restore an outer nested overload. */
+ void pop (tree outer)
+ {
+ gcc_checking_assert (!ovl);
+ ovl = outer;
+ }
+
private:
/* We make these static functions to avoid the address of the
iterator escaping the local context. */
/* Iterator over a (potentially) 2 dimensional overload, which is
produced by name lookup. */
-/* Note this is currently a placeholder, as the name-lookup changes
- are not yet committed. */
-
class lkp_iterator : public ovl_iterator
{
typedef ovl_iterator parent;
+ tree outer;
+
+ public:
+ explicit lkp_iterator (tree o)
+ : parent (o, true), outer (maybe_push ())
+ {
+ }
+
public:
- lkp_iterator (tree o)
- : parent (o)
+ lkp_iterator &operator++ ()
{
+ bool repush = !outer;
+
+ if (!parent::operator++ () && !repush)
+ {
+ pop (outer);
+ repush = true;
+ }
+
+ if (repush)
+ outer = maybe_push ();
+
+ return *this;
}
};
extern tree ovl_insert (tree fn, tree maybe_ovl,
bool using_p = false);
extern tree ovl_skip_hidden (tree) ATTRIBUTE_PURE;
+extern void lookup_mark (tree lookup, bool val);
extern tree lookup_add (tree fns, tree lookup);
+extern tree lookup_maybe_add (tree fns, tree lookup);
extern void lookup_keep (tree lookup, bool keep);
extern int is_overloaded_fn (tree) ATTRIBUTE_PURE;
extern bool really_overloaded_fn (tree) ATTRIBUTE_PURE;
int flags; /* Lookup flags. */
vec<tree, va_heap, vl_embed> *scopes;
name_lookup *previous; /* Previously active lookup. */
- hash_set<tree> *fn_set;
protected:
/* Marked scope stack for outermost name lookup. */
public:
name_lookup (tree n, int f = 0)
: name (n), value (NULL_TREE), type (NULL_TREE), flags (f),
- scopes (NULL), previous (NULL), fn_set (NULL)
+ scopes (NULL), previous (NULL)
{
preserve_state ();
}
~name_lookup ()
{
- gcc_checking_assert (!fn_set);
restore_state ();
}
previous->scopes->quick_push (decl);
}
}
+
+ /* Unmark the outer partial lookup. */
+ lookup_mark (previous->value, false);
}
else
scopes = shared_scopes;
active = previous;
if (previous)
{
+ free (scopes);
+
unsigned length = vec_safe_length (previous->scopes);
for (unsigned ix = 0; ix != length; ix++)
{
LOOKUP_SEEN_P (decl) = true;
}
- free (scopes);
+ /* Remark the outer partial lookup. */
+ lookup_mark (previous->value, true);
}
else
shared_scopes = scopes;
&& same_type_p (TREE_TYPE (value), TREE_TYPE (new_val))))
;
else if (OVL_P (value) && OVL_P (new_val))
- {
- for (ovl_iterator iter (new_val); iter; ++iter)
- value = lookup_add (*iter, value);
- }
+ value = lookup_add (new_val, value);
else
value = ambiguous (new_val, value);
}
return;
/* Only add those that aren't already there. */
- for (ovl_iterator iter (fns); iter; ++iter)
- if (!fn_set->add (*iter))
- value = lookup_add (*iter, value);
+ value = lookup_maybe_add (fns, value);
}
/* Add functions of a namespace to the lookup structure. */
tree
name_lookup::search_adl (tree fns, vec<tree, va_gc> *args)
{
+ lookup_mark (fns, true);
value = fns;
- /* Add the current overload set into the hash table. */
- fn_set = new hash_set<tree>;
- for (lkp_iterator iter (fns); iter; ++iter)
- fn_set->add (*iter);
-
unsigned ix;
tree arg;
else
adl_expr (arg);
- delete fn_set;
- fn_set = NULL;
-
fns = value;
+ lookup_mark (fns, false);
return fns;
}
inform (DECL_SOURCE_LOCATION (shadowed), "shadowed declaration is here");
}
-
/* DECL is being pushed inside function CTX. Set its context, if
needed. */
}
/* Record a decl-node X as belonging to the current lexical scope.
- It's a friend if IS_FRIEND is true. */
+ It's a friend if IS_FRIEND is true -- which affects exactly where
+ we push it. */
tree
pushdecl (tree x, bool is_friend)
{
- tree ret;
bool subtime = timevar_cond_start (TV_NAME_LOOKUP);
- ret = do_pushdecl (x, is_friend);
+ tree ret = do_pushdecl (x, is_friend);
timevar_cond_stop (TV_NAME_LOOKUP, subtime);
return ret;
}
supplement_binding (binding, val);
}
-/* Set value binding og NAME in the global namespace to VAL. Does not
+/* Set value binding of NAME in the global namespace to VAL. Does not
add it to the list of things in the namespace. */
void
/* Glue all these conversion functions together
with those we already have. */
- for (ovl_iterator iter (ovl); iter; ++iter)
- fns = lookup_add (*iter, fns);
+ fns = lookup_add (ovl, fns);
}
}
break;
case OVERLOAD:
print_node (file, "name", OVL_NAME (node), indent+4);
- for (lkp_iterator iter (node); iter; ++iter)
- print_node (file, "function", *iter, indent+4);
+ for (ovl_iterator iter (node, true); iter; ++iter)
+ print_node (file,
+ TREE_CODE (*iter) == OVERLOAD ? "inner" : "function",
+ *iter, indent+4);
break;
case TEMPLATE_PARM_INDEX:
print_node (file, "decl", TEMPLATE_PARM_DECL (node), indent+4);
}
/* Add FN to the (potentially NULL) overload set OVL. USING_P is
- true, if FN is via a using declaration. Overloads are ordered as
- using, regular. */
+ true, if FN is via a using declaration. We also pay attention to
+ DECL_HIDDEN. Overloads are ordered as hidden, using, regular. */
tree
ovl_insert (tree fn, tree maybe_ovl, bool using_p)
return overload;
}
+/* Mark or unmark a lookup set.  VAL is true to mark, false to unmark.
+   Marking sets the LOOKUP_SEEN_P flag on each overload node so that
+   membership can later be tested in constant time; the checking
+   asserts verify we never mark a marked node or unmark an unmarked
+   one.  */
+
+void
+lookup_mark (tree ovl, bool val)
+{
+ /* For every node that is a lookup, mark the thing it points to. */
+ for (; ovl && TREE_CODE (ovl) == OVERLOAD && OVL_LOOKUP_P (ovl);
+ ovl = OVL_CHAIN (ovl))
+ {
+ tree targ = OVL_FUNCTION (ovl);
+ gcc_checking_assert (LOOKUP_SEEN_P (targ) != val);
+ LOOKUP_SEEN_P (targ) = val;
+ }
+
+ /* The tail, if any, is a non-lookup overload or a lone function. */
+ if (ovl && (TREE_CODE (ovl) == OVERLOAD ||
+ TREE_CODE (ovl) == FUNCTION_DECL))
+ {
+ /* Mark the overload itself. */
+ gcc_checking_assert (LOOKUP_SEEN_P (ovl) != val);
+ LOOKUP_SEEN_P (ovl) = val;
+ }
+}
+
/* Add a set of new FNS into a lookup. */
tree
return lookup;
}
+/* FNS is a new overload set, add it to LOOKUP, if it is not already
+   present there.  Presence is determined via the LOOKUP_SEEN_P marks
+   set by lookup_mark (and by this function itself).  */
+
+tree
+lookup_maybe_add (tree fns, tree lookup)
+{
+ if (LOOKUP_SEEN_P (fns))
+ return lookup;
+
+ if (lookup && TREE_CODE (fns) == OVERLOAD)
+ {
+ /* Determine if we already have some part of this overload in
+ the overload set. If so fix things up so we only have the
+ overload set once.  Scan FNS for the first marked node,
+ stopping at the first non-OVERLOAD tail node. */
+ tree marked = NULL_TREE;
+
+ for (tree probe = fns; probe; probe = OVL_CHAIN (probe))
+ if (LOOKUP_SEEN_P (probe))
+ {
+ marked = probe;
+ break;
+ }
+ else if (TREE_CODE (probe) != OVERLOAD)
+ break;
+
+ if (marked)
+ {
+ /* The tail of this overload is already in the lookup
+ set. Stitch out the tail case, which might involve
+ copying. */
+ bool rewrite = false;
+
+ LOOKUP_SEEN_P (marked) = false;
+ for (tree *prev = &lookup, probe = *prev;
+ ; prev = &OVL_CHAIN (probe), probe = *prev)
+ {
+ /* MARKED itself heads a chain of the lookup: truncate. */
+ if (probe == marked)
+ {
+ *prev = NULL_TREE;
+ break;
+ }
+ gcc_checking_assert (OVL_LOOKUP_P (probe));
+ /* A lookup node pointing at MARKED: splice it out. */
+ if (marked == OVL_FUNCTION (probe))
+ {
+ *prev = OVL_CHAIN (probe);
+ break;
+ }
+
+ /* If we're in a used part of the lookup set, copy the
+ node, so as to not disturb stored uses. */
+ gcc_checking_assert (!rewrite || OVL_USED_P (probe));
+ if (OVL_USED_P (probe))
+ {
+ rewrite = true;
+ probe = ovl_copy (probe);
+ OVL_LOOKUP_P (probe) = true;
+ *prev = probe;
+ }
+ }
+ }
+ }
+
+ /* Finally mark the new overload and prepend it to the current
+ lookup. */
+ LOOKUP_SEEN_P (fns) = true;
+
+ return lookup_add (fns, lookup);
+}
+
/* If KEEP is true, preserve the contents of a lookup so that it is
available for a later instantiation. Otherwise release the LOOKUP
nodes for reuse. */