/* Perform non-arithmetic operations on values, for GDB.
- Copyright (C) 1986-2017 Free Software Foundation, Inc.
+ Copyright (C) 1986-2018 Free Software Foundation, Inc.
This file is part of GDB.
#include "infcall.h"
#include "dictionary.h"
#include "cp-support.h"
-#include "dfp.h"
+#include "target-float.h"
#include "tracepoint.h"
-#include "observer.h"
+#include "observable.h"
#include "objfiles.h"
#include "extension.h"
+#include "byte-vector.h"
extern unsigned int overload_debug;
/* Local functions. */
const int no_adl);
static int find_oload_champ (struct value **, int, int,
- struct fn_field *, VEC (xmethod_worker_ptr) *,
+ struct fn_field *,
+ const std::vector<xmethod_worker_up> *,
struct symbol **, struct badness_vector **);
static int oload_method_static_p (struct fn_field *, int);
static void find_method_list (struct value **, const char *,
LONGEST, struct type *, struct fn_field **, int *,
- VEC (xmethod_worker_ptr) **,
+ std::vector<xmethod_worker_up> *,
struct type **, LONGEST *);
-void _initialize_valops (void);
-
#if 0
/* Flag for whether we want to abandon failed expression evals by
default. */
struct value *blocklen;
blocklen = value_from_longest (builtin_type (gdbarch)->builtin_int, len);
- val = call_function_by_hand (val, 1, &blocklen);
+ val = call_function_by_hand (val, NULL, 1, &blocklen);
if (value_logical_not (val))
{
if (!target_has_execution)
/* We deref the value and then do the cast. */
return value_cast (type, coerce_ref (arg2));
+ /* Strip typedefs / resolve stubs in order to get at the type's
+ code/length, but remember the original type, to use as the
+ resulting type of the cast, in case it was a typedef. */
+ struct type *to_type = type;
+
type = check_typedef (type);
code1 = TYPE_CODE (type);
arg2 = coerce_ref (arg2);
code2 = TYPE_CODE (type2);
if (code1 == TYPE_CODE_COMPLEX)
- return cast_into_complex (type, arg2);
+ return cast_into_complex (to_type, arg2);
if (code1 == TYPE_CODE_BOOL)
{
code1 = TYPE_CODE_INT;
&& (code2 == TYPE_CODE_STRUCT || code2 == TYPE_CODE_UNION)
&& TYPE_NAME (type) != 0)
{
- struct value *v = value_cast_structs (type, arg2);
+ struct value *v = value_cast_structs (to_type, arg2);
if (v)
return v;
}
- if (code1 == TYPE_CODE_FLT && scalar)
- return value_from_double (type, value_as_double (arg2));
- else if (code1 == TYPE_CODE_DECFLOAT && scalar)
+ if (is_floating_type (type) && scalar)
{
- enum bfd_endian byte_order = gdbarch_byte_order (get_type_arch (type));
- int dec_len = TYPE_LENGTH (type);
- gdb_byte dec[16];
+ if (is_floating_value (arg2))
+ {
+ struct value *v = allocate_value (to_type);
+ target_float_convert (value_contents (arg2), type2,
+ value_contents_raw (v), type);
+ return v;
+ }
- if (code2 == TYPE_CODE_FLT)
- decimal_from_floating (arg2, dec, dec_len, byte_order);
- else if (code2 == TYPE_CODE_DECFLOAT)
- decimal_convert (value_contents (arg2), TYPE_LENGTH (type2),
- byte_order, dec, dec_len, byte_order);
+ /* The only option left is an integral type. */
+ if (TYPE_UNSIGNED (type2))
+ return value_from_ulongest (to_type, value_as_long (arg2));
else
- /* The only option left is an integral type. */
- decimal_from_integral (arg2, dec, dec_len, byte_order);
-
- return value_from_decfloat (type, dec);
+ return value_from_longest (to_type, value_as_long (arg2));
}
else if ((code1 == TYPE_CODE_INT || code1 == TYPE_CODE_ENUM
|| code1 == TYPE_CODE_RANGE)
gdbarch_byte_order (get_type_arch (type2)));
else
longest = value_as_long (arg2);
- return value_from_longest (type, convert_to_boolean ?
+ return value_from_longest (to_type, convert_to_boolean ?
(LONGEST) (longest ? 1 : 0) : longest);
}
else if (code1 == TYPE_CODE_PTR && (code2 == TYPE_CODE_INT
|| longest <= -((LONGEST) 1 << addr_bit))
warning (_("value truncated"));
}
- return value_from_longest (type, longest);
+ return value_from_longest (to_type, longest);
}
else if (code1 == TYPE_CODE_METHODPTR && code2 == TYPE_CODE_INT
&& value_as_long (arg2) == 0)
{
- struct value *result = allocate_value (type);
+ struct value *result = allocate_value (to_type);
- cplus_make_method_ptr (type, value_contents_writeable (result), 0, 0);
+ cplus_make_method_ptr (to_type, value_contents_writeable (result), 0, 0);
return result;
}
else if (code1 == TYPE_CODE_MEMBERPTR && code2 == TYPE_CODE_INT
{
/* The Itanium C++ ABI represents NULL pointers to members as
minus one, instead of biasing the normal case. */
- return value_from_longest (type, -1);
+ return value_from_longest (to_type, -1);
}
else if (code1 == TYPE_CODE_ARRAY && TYPE_VECTOR (type)
&& code2 == TYPE_CODE_ARRAY && TYPE_VECTOR (type2)
error (_("can only cast scalar to vector of same size"));
else if (code1 == TYPE_CODE_VOID)
{
- return value_zero (type, not_lval);
+ return value_zero (to_type, not_lval);
}
else if (TYPE_LENGTH (type) == TYPE_LENGTH (type2))
{
if (code1 == TYPE_CODE_PTR && code2 == TYPE_CODE_PTR)
- return value_cast_pointers (type, arg2, 0);
+ return value_cast_pointers (to_type, arg2, 0);
arg2 = value_copy (arg2);
- deprecated_set_value_type (arg2, type);
- set_value_enclosing_type (arg2, type);
+ deprecated_set_value_type (arg2, to_type);
+ set_value_enclosing_type (arg2, to_type);
set_value_pointed_to_offset (arg2, 0); /* pai: chk_val */
return arg2;
}
else if (VALUE_LVAL (arg2) == lval_memory)
- return value_at_lazy (type, value_address (arg2));
+ return value_at_lazy (to_type, value_address (arg2));
else
{
error (_("Invalid cast."));
struct type *type1 = check_typedef (type);
struct value *val;
- if (TYPE_CODE (type1) == TYPE_CODE_DECFLOAT)
- {
- enum bfd_endian byte_order = gdbarch_byte_order (get_type_arch (type));
- gdb_byte v[16];
-
- decimal_from_string (v, TYPE_LENGTH (type), byte_order, "1");
- val = value_from_decfloat (type, v);
- }
- else if (TYPE_CODE (type1) == TYPE_CODE_FLT)
- {
- val = value_from_double (type, (DOUBLEST) 1);
- }
- else if (is_integral_type (type1))
+ if (is_integral_type (type1) || is_floating_type (type1))
{
val = value_from_longest (type, (LONGEST) 1);
}
}
void
-read_value_memory (struct value *val, LONGEST embedded_offset,
+read_value_memory (struct value *val, LONGEST bit_offset,
int stack, CORE_ADDR memaddr,
gdb_byte *buffer, size_t length)
{
enum target_xfer_status status;
ULONGEST xfered_partial;
- status = target_xfer_partial (current_target.beneath,
+ status = target_xfer_partial (target_stack,
object, NULL,
buffer + xfered_total * unit_size, NULL,
memaddr + xfered_total,
if (status == TARGET_XFER_OK)
/* nothing */;
else if (status == TARGET_XFER_UNAVAILABLE)
- mark_value_bytes_unavailable (val, embedded_offset + xfered_total,
- xfered_partial);
+ mark_value_bits_unavailable (val, (xfered_total * HOST_CHAR_BIT
+ + bit_offset),
+ xfered_partial * HOST_CHAR_BIT);
else if (status == TARGET_XFER_EOF)
memory_error (TARGET_XFER_E_IO, memaddr + xfered_total);
else
}
}
- observer_notify_register_changed (frame, value_reg);
+ gdb::observers::register_changed.notify (frame, value_reg);
break;
}
case lval_register:
case lval_computed:
- observer_notify_target_changed (¤t_target);
+ gdb::observers::target_changed.notify (target_stack);
/* Having destroyed the frame cache, restore the selected
frame. */
if (offset < 0 || offset >= TYPE_LENGTH (type))
{
- gdb_byte *tmp;
- struct cleanup *back_to;
CORE_ADDR address;
- tmp = (gdb_byte *) xmalloc (TYPE_LENGTH (baseclass));
- back_to = make_cleanup (xfree, tmp);
+ gdb::byte_vector tmp (TYPE_LENGTH (baseclass));
address = value_address (*arg1p);
if (target_read_memory (address + offset,
- tmp, TYPE_LENGTH (baseclass)) != 0)
+ tmp.data (), TYPE_LENGTH (baseclass)) != 0)
error (_("virtual baseclass botch"));
base_val = value_from_contents_and_address (baseclass,
- tmp,
+ tmp.data (),
address + offset);
base_valaddr = value_contents_for_printing (base_val);
this_offset = 0;
- do_cleanups (back_to);
}
else
{
return NULL;
}
+/* See value.h. */
+
+int
+value_union_variant (struct type *union_type, const gdb_byte *contents)
+{
+ gdb_assert (TYPE_CODE (union_type) == TYPE_CODE_UNION
+ && TYPE_FLAG_DISCRIMINATED_UNION (union_type));
+
+ struct dynamic_prop *discriminant_prop
+ = get_dyn_prop (DYN_PROP_DISCRIMINATED, union_type);
+ gdb_assert (discriminant_prop != nullptr);
+
+ struct discriminant_info *info
+ = (struct discriminant_info *) discriminant_prop->data.baton;
+ gdb_assert (info != nullptr);
+
+ /* If this is a univariant union, just return the sole field. */
+ if (TYPE_NFIELDS (union_type) == 1)
+ return 0;
+ /* This should only happen for univariants, which we already dealt
+ with. */
+ gdb_assert (info->discriminant_index != -1);
+
+ /* Compute the discriminant. Note that unpack_field_as_long handles
+ sign extension when necessary, as does the DWARF reader -- so
+ signed discriminants will be handled correctly despite the use of
+ an unsigned type here. */
+ ULONGEST discriminant = unpack_field_as_long (union_type, contents,
+ info->discriminant_index);
+
+ /* Scan the fields for a variant whose recorded discriminant value
+ equals the computed one, skipping both the default variant and
+ the discriminant field itself. */
+ for (int i = 0; i < TYPE_NFIELDS (union_type); ++i)
+ {
+ if (i != info->default_index
+ && i != info->discriminant_index
+ && discriminant == info->discriminants[i])
+ return i;
+ }
+
+ /* No variant matched; fall back to the default variant if the
+ union declares one, otherwise report an error. */
+ if (info->default_index == -1)
+ error (_("Could not find variant corresponding to discriminant %s"),
+ pulongest (discriminant));
+ return info->default_index;
+}
+
/* Search through the methods of an object (and its bases) to find a
specified method. Return the pointer to the fn_field list FN_LIST of
overloaded instances defined in the source language. If available
find_method_list (struct value **argp, const char *method,
LONGEST offset, struct type *type,
struct fn_field **fn_list, int *num_fns,
- VEC (xmethod_worker_ptr) **xm_worker_vec,
+ std::vector<xmethod_worker_up> *xm_worker_vec,
struct type **basetype, LONGEST *boffset)
{
int i;
struct fn_field *f = NULL;
- VEC (xmethod_worker_ptr) *worker_vec = NULL, *new_vec = NULL;
gdb_assert (fn_list != NULL && xm_worker_vec != NULL);
type = check_typedef (type);
and hence there is no point restricting them with something like method
hiding. Moreover, if hiding is done for xmethods as well, then we will
have to provide a mechanism to un-hide (like the 'using' construct). */
- worker_vec = get_matching_xmethod_workers (type, method);
- new_vec = VEC_merge (xmethod_worker_ptr, *xm_worker_vec, worker_vec);
-
- VEC_free (xmethod_worker_ptr, *xm_worker_vec);
- VEC_free (xmethod_worker_ptr, worker_vec);
- *xm_worker_vec = new_vec;
+ get_matching_xmethod_workers (type, method, xm_worker_vec);
/* If source methods are not found in current class, look for them in the
base classes. We also have to go through the base classes to gather
value_find_oload_method_list (struct value **argp, const char *method,
LONGEST offset, struct fn_field **fn_list,
int *num_fns,
- VEC (xmethod_worker_ptr) **xm_worker_vec,
+ std::vector<xmethod_worker_up> *xm_worker_vec,
struct type **basetype, LONGEST *boffset)
{
struct type *t;
/* Clear the lists. */
*fn_list = NULL;
*num_fns = 0;
- *xm_worker_vec = NULL;
+ xm_worker_vec->clear ();
find_method_list (argp, method, 0, t, fn_list, num_fns, xm_worker_vec,
basetype, boffset);
struct fn_field *fns_ptr = NULL;
/* For non-methods, the list of overloaded function symbols. */
struct symbol **oload_syms = NULL;
- /* For xmethods, the VEC of xmethod workers. */
- VEC (xmethod_worker_ptr) *xm_worker_vec = NULL;
+ /* For xmethods, the vector of xmethod workers. */
+ std::vector<xmethod_worker_up> xm_worker_vec;
/* Number of overloaded instances being considered. */
int num_fns = 0;
struct type *basetype = NULL;
value_find_oload_method_list (&temp, name, 0, &fns_ptr, &num_fns,
&xm_worker_vec, &basetype, &boffset);
/* If this is a method only search, and no methods were found
- the search has faild. */
- if (method == METHOD && (!fns_ptr || !num_fns) && !xm_worker_vec)
+ the search has failed. */
+ if (method == METHOD && (!fns_ptr || !num_fns) && xm_worker_vec.empty ())
error (_("Couldn't find method %s%s%s"),
obj_type_name,
(obj_type_name && *obj_type_name) ? "::" : "",
make_cleanup (xfree, src_method_badness);
}
- if (VEC_length (xmethod_worker_ptr, xm_worker_vec) > 0)
+ if (!xm_worker_vec.empty ())
{
ext_method_oload_champ = find_oload_champ (args, nargs,
- 0, NULL, xm_worker_vec,
+ 0, NULL, &xm_worker_vec,
NULL, &ext_method_badness);
ext_method_match_quality = classify_oload_match (ext_method_badness,
nargs, 0);
make_cleanup (xfree, ext_method_badness);
- make_cleanup (free_xmethod_worker_vec, xm_worker_vec);
}
if (src_method_oload_champ >= 0 && ext_method_oload_champ >= 0)
basetype, boffset);
}
else
- {
- *valp = value_of_xmethod (clone_xmethod_worker
- (VEC_index (xmethod_worker_ptr, xm_worker_vec,
- ext_method_oload_champ)));
- }
+ *valp = value_from_xmethod
+ (std::move (xm_worker_vec[ext_method_oload_champ]));
}
else
*symp = oload_syms[func_oload_champ];
static int
find_oload_champ (struct value **args, int nargs,
int num_fns, struct fn_field *fns_ptr,
- VEC (xmethod_worker_ptr) *xm_worker_vec,
+ const std::vector<xmethod_worker_up> *xm_worker_vec,
struct symbol **oload_syms,
struct badness_vector **oload_champ_bv)
{
int ix;
- int fn_count;
/* A measure of how good an overloaded instance is. */
struct badness_vector *bv;
/* Index of best overloaded function. */
*oload_champ_bv = NULL;
- fn_count = (xm_worker_vec != NULL
- ? VEC_length (xmethod_worker_ptr, xm_worker_vec)
- : num_fns);
+ int fn_count = xm_worker_vec != NULL ? xm_worker_vec->size () : num_fns;
+
/* Consider each candidate in turn. */
for (ix = 0; ix < fn_count; ix++)
{
int static_offset = 0;
int nparms;
struct type **parm_types;
- struct xmethod_worker *worker = NULL;
if (xm_worker_vec != NULL)
{
- worker = VEC_index (xmethod_worker_ptr, xm_worker_vec, ix);
- parm_types = get_xmethod_arg_types (worker, &nparms);
+ xmethod_worker *worker = (*xm_worker_vec)[ix].get ();
+ parm_types = worker->get_arg_types (&nparms);
}
else
{
{
for (j = 0; j < len; ++j)
{
+ if (TYPE_CONST (intype) != TYPE_FN_FIELD_CONST (f, j))
+ continue;
+ if (TYPE_VOLATILE (intype) != TYPE_FN_FIELD_VOLATILE (f, j))
+ continue;
+
if (compare_parameters (TYPE_FN_FIELD_TYPE (f, j), intype, 0)
|| compare_parameters (TYPE_FN_FIELD_TYPE (f, j),
intype, 1))