gdb_static_assert (AARCH64_V0_REGNUM == AARCH64_SVE_Z0_REGNUM);
if (regcache->raw_read (v_regnum, reg_buf) != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0,
- result_value->type ()->length ());
+ result_value->mark_bytes_unavailable (0,
+ result_value->type ()->length ());
else
memcpy (result_value->contents_raw ().data (), reg_buf, regsize);
/* Read the bottom 4 bytes of X. */
if (regcache->raw_read_part (x_regnum, offset, 4, data) != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0, 4);
+ result_value->mark_bytes_unavailable (0, 4);
else
memcpy (result_value->contents_raw ().data (), data, 4);
{
struct value *result;
- if (value_optimized_out (val))
+ if (val->optimized_out ())
result = value::allocate_optimized_out (type);
else if (val->lazy ()
/* Be careful not to make a lazy not_lval value. */
gdb_printf (stream, "(");
print_optional_low_bound (stream, type, options);
- if (value_entirely_optimized_out (val))
+ if (val->entirely_optimized_out ())
val_print_optimized_out (val, stream);
else if (TYPE_FIELD_BITSIZE (type, 0) > 0)
{
struct value *value = get_frame_register_value (frame, regnum);
gdb_assert (value != NULL);
- *optimizedp = value_optimized_out (value);
- *unavailablep = !value_entirely_available (value);
+ *optimizedp = value->optimized_out ();
+ *unavailablep = !value->entirely_available ();
if (*optimizedp || *unavailablep)
{
if (status == REG_VALID)
memcpy (buf, raw_buf + 1, 1);
else
- mark_value_bytes_unavailable (result_value, 0,
- result_value->type ()->length ());
+ result_value->mark_bytes_unavailable (0,
+ result_value->type ()->length ());
}
else
{
if (status == REG_VALID)
memcpy (buf, raw_buf, 1);
else
- mark_value_bytes_unavailable (result_value, 0,
- result_value->type ()->length ());
+ result_value->mark_bytes_unavailable (0,
+ result_value->type ()->length ());
}
}
else if (i386_dword_regnum_p (gdbarch, regnum))
if (status == REG_VALID)
memcpy (buf, raw_buf, 4);
else
- mark_value_bytes_unavailable (result_value, 0,
- result_value->type ()->length ());
+ result_value->mark_bytes_unavailable (0,
+ result_value->type ()->length ());
}
else
i386_pseudo_register_read_into_value (gdbarch, regcache, regnum,
long as the entire array is valid. */
if (c_textual_element_type (unresolved_elttype,
options->format)
- && value_bytes_available (val, 0, type->length ())
- && !value_bits_any_optimized_out (val, 0,
- TARGET_CHAR_BIT * type->length ()))
+ && val->bytes_available (0, type->length ())
+ && !val->bits_any_optimized_out (0,
+ TARGET_CHAR_BIT * type->length ()))
{
int force_ellipses = 0;
/* Pointer to class, check real type of object. */
gdb_printf (stream, "(");
- if (value_entirely_available (val))
+ if (val->entirely_available ())
{
real_type = value_rtti_indirect_type (val, &full, &top,
&using_enc);
regnum = compile_register_name_demangle (gdbarch, reg_name);
regval = value_from_register (reg_type, regnum, get_current_frame ());
- if (value_optimized_out (regval))
+ if (regval->optimized_out ())
error (_("Register \"%s\" is optimized out."), reg_name);
- if (!value_entirely_available (regval))
+ if (!regval->entirely_available ())
error (_("Register \"%s\" is not available."), reg_name);
inferior_addr = regs_base + reg_offset;
{
struct value_print_options opts;
- if (value_entirely_optimized_out (val))
+ if (val->entirely_optimized_out ())
{
val_print_optimized_out (val, stream);
return;
&& type->field (0).type ()->code () == TYPE_CODE_INT
&& strcmp (type->field (0).name (), "length") == 0
&& strcmp (type->field (1).name (), "ptr") == 0
- && !value_bits_any_optimized_out (val,
- TARGET_CHAR_BIT * embedded_offset,
- TARGET_CHAR_BIT * type->length ()))
+ && !val->bits_any_optimized_out (TARGET_CHAR_BIT * embedded_offset,
+ TARGET_CHAR_BIT * type->length ()))
{
CORE_ADDR addr;
struct type *elttype;
{
if (check_optimized)
return true;
- mark_value_bits_optimized_out (v, offset,
- this_size_bits);
+ v->mark_bits_optimized_out (offset,
+ this_size_bits);
}
if (unavail && !check_optimized)
- mark_value_bits_unavailable (v, offset,
- this_size_bits);
+ v->mark_bits_unavailable (offset,
+ this_size_bits);
break;
}
if (from != nullptr)
{
- mark_value_bits_optimized_out (v, offset, this_size_bits);
+ v->mark_bits_optimized_out (offset, this_size_bits);
break;
}
if (from != nullptr)
{
- mark_value_bits_optimized_out (v, offset, this_size_bits);
+ v->mark_bits_optimized_out (offset, this_size_bits);
break;
}
case DWARF_VALUE_IMPLICIT_POINTER:
if (from != nullptr)
{
- mark_value_bits_optimized_out (v, offset, this_size_bits);
+ v->mark_bits_optimized_out (offset, this_size_bits);
break;
}
case DWARF_VALUE_OPTIMIZED_OUT:
if (check_optimized)
return true;
- mark_value_bits_optimized_out (v, offset, this_size_bits);
+ v->mark_bits_optimized_out (offset, this_size_bits);
break;
default:
retval = value_from_register (subobj_type, gdb_regnum,
this->m_frame);
- if (value_optimized_out (retval))
+ if (retval->optimized_out ())
{
/* This means the register has undefined value / was
not saved. As we're computing the location of some
{
free_values.free_to_mark ();
retval = value::allocate (subobj_type);
- mark_value_bytes_unavailable (retval, 0,
- subobj_type->length ());
+ retval->mark_bytes_unavailable (0,
+ subobj_type->length ());
return retval;
}
else if (ex.error == NO_ENTRY_VALUE_ERROR)
throw;
}
- if (value_optimized_out (result))
+ if (result->optimized_out ())
return 0;
if (VALUE_LVAL (result) == lval_memory)
val = dwarf2_evaluate_loc_desc (baton->property_type, frame, data,
size, baton->loclist.per_cu,
baton->loclist.per_objfile);
- if (!value_optimized_out (val))
+ if (!val->optimized_out ())
{
*value = value_as_address (val);
return true;
struct value *e_val = value_from_component (m_val, elt_type, elt_off);
struct value *e_prev = value_from_component (m_val, elt_type,
elt_off_prev);
- repeated = ((value_entirely_available (e_prev)
- && value_entirely_available (e_val)
+ repeated = ((e_prev->entirely_available ()
+ && e_val->entirely_available ()
&& e_prev->contents_eq (e_val))
- || (value_entirely_unavailable (e_prev)
- && value_entirely_unavailable (e_val)));
+ || (e_prev->entirely_unavailable ()
+ && e_val->entirely_unavailable ()));
}
if (repeated)
struct value *e_val1 = value_from_component (val, type, offset1);
struct value *e_val2 = value_from_component (val, type, offset2);
- return ((value_entirely_available (e_val1)
- && value_entirely_available (e_val2)
+ return ((e_val1->entirely_available ()
+ && e_val2->entirely_available ()
&& e_val1->contents_eq (e_val2))
- || (value_entirely_unavailable (e_val1)
- && value_entirely_unavailable (e_val2)));
+ || (e_val1->entirely_unavailable ()
+ && e_val2->entirely_unavailable ()));
}
}
if (!ok)
{
if (optim)
- mark_value_bytes_optimized_out (v, 0, type->length ());
+ v->mark_bytes_optimized_out (0, type->length ());
if (unavail)
- mark_value_bytes_unavailable (v, 0, type->length ());
+ v->mark_bytes_unavailable (0, type->length ());
}
}
else
value = gdbarch_value_from_register (gdbarch, type, regnum, null_frame_id);
read_frame_register_value (value, frame);
- if (value_optimized_out (value))
+ if (value->optimized_out ())
{
/* This function is used while computing a location expression.
Complain about the value being optimized out, rather than
gdb_assert (value != NULL);
- *optimizedp = value_optimized_out (value);
- *unavailablep = !value_entirely_available (value);
+ *optimizedp = value->optimized_out ();
+ *unavailablep = !value->entirely_available ();
*lvalp = VALUE_LVAL (value);
*addrp = value->address ();
if (*lvalp == lval_register)
string_file debug_file;
gdb_printf (&debug_file, " ->");
- if (value_optimized_out (value))
+ if (value->optimized_out ())
{
gdb_printf (&debug_file, " ");
val_print_not_saved (&debug_file);
gdb_assert (value != NULL);
- if (value_optimized_out (value))
+ if (value->optimized_out ())
{
throw_error (OPTIMIZED_OUT_ERROR,
_("Register %d was not saved"), regnum);
}
- if (!value_entirely_available (value))
+ if (!value->entirely_available ())
{
throw_error (NOT_AVAILABLE_ERROR,
_("Register %d is not available"), regnum);
gdb_assert (value != NULL);
- if (value_optimized_out (value))
+ if (value->optimized_out ())
{
throw_error (OPTIMIZED_OUT_ERROR,
_("Register %d was not saved"), regnum);
}
- if (!value_entirely_available (value))
+ if (!value->entirely_available ())
{
throw_error (NOT_AVAILABLE_ERROR,
_("Register %d is not available"), regnum);
{
struct value *regval = get_frame_register_value (frame, regnum);
- if (!value_optimized_out (regval)
- && value_entirely_available (regval))
+ if (!regval->optimized_out ()
+ && regval->entirely_available ())
{
struct gdbarch *gdbarch = get_frame_arch (frame);
enum bfd_endian byte_order = gdbarch_byte_order (gdbarch);
= frame_unwind_register_value (frame_info_ptr (frame->next),
regnum);
gdb_assert (value != NULL);
- *optimizedp = value_optimized_out (value);
- *unavailablep = !value_entirely_available (value);
+ *optimizedp = value->optimized_out ();
+ *unavailablep = !value->entirely_available ();
if (*optimizedp || *unavailablep)
{
field_offset = type->field (i).loc_bitpos () / 8;
field_length = field_type->length ();
- if (!value_bytes_available (val, embedded_offset + field_offset,
- field_length))
+ if (!val->bytes_available (embedded_offset + field_offset,
+ field_length))
throw_error (NOT_AVAILABLE_ERROR,
_("Virtual baseclass pointer is not available"));
value->fetch_lazy ();
/* No pretty-printer support for unavailable values. */
- if (!value_bytes_available (value, 0, type->length ()))
+ if (!value->bytes_available (0, type->length ()))
return EXT_LANG_RC_NOP;
if (!gdb_scheme_initialized)
return gdbscm_wrap ([=]
{
- return scm_from_bool (value_optimized_out (v_smob->value));
+ return scm_from_bool (v_smob->value->optimized_out ());
});
}
/* Extract (always little endian). */
status = regcache->raw_read (fpnum, raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0,
- result_value->type ()->length ());
+ result_value->mark_bytes_unavailable (0,
+ result_value->type ()->length ());
else
memcpy (buf, raw_buf, register_size (gdbarch, regnum));
}
status = regcache->raw_read (I387_BND0R_REGNUM (tdep) + regnum,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0, 16);
+ result_value->mark_bytes_unavailable (0, 16);
else
{
enum bfd_endian byte_order = gdbarch_byte_order (target_gdbarch ());
/* Extract (always little endian). */
status = regcache->raw_read (tdep->k0_regnum + regnum, raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0, 8);
+ result_value->mark_bytes_unavailable (0, 8);
else
memcpy (buf, raw_buf, 8);
}
status = regcache->raw_read (I387_XMM0_REGNUM (tdep) + regnum,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0, 16);
+ result_value->mark_bytes_unavailable (0, 16);
else
memcpy (buf, raw_buf, 16);
status = regcache->raw_read (tdep->ymm0h_regnum + regnum,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 16, 16);
+ result_value->mark_bytes_unavailable (16, 16);
else
memcpy (buf + 16, raw_buf, 16);
}
- num_lower_zmm_regs,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0, 16);
+ result_value->mark_bytes_unavailable (0, 16);
else
memcpy (buf, raw_buf, 16);
- num_lower_zmm_regs,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 16, 16);
+ result_value->mark_bytes_unavailable (16, 16);
else
memcpy (buf + 16, raw_buf, 16);
}
status = regcache->raw_read (tdep->zmm0h_regnum + regnum,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 32, 32);
+ result_value->mark_bytes_unavailable (32, 32);
else
memcpy (buf + 32, raw_buf, 32);
}
status = regcache->raw_read (I387_XMM0_REGNUM (tdep) + regnum,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0, 16);
+ result_value->mark_bytes_unavailable (0, 16);
else
memcpy (buf, raw_buf, 16);
/* Read upper 128bits. */
status = regcache->raw_read (tdep->ymm0h_regnum + regnum,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 16, 32);
+ result_value->mark_bytes_unavailable (16, 32);
else
memcpy (buf + 16, raw_buf, 16);
}
status = regcache->raw_read (I387_XMM16_REGNUM (tdep) + regnum,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0, 16);
+ result_value->mark_bytes_unavailable (0, 16);
else
memcpy (buf, raw_buf, 16);
/* Read upper 128bits. */
status = regcache->raw_read (tdep->ymm16h_regnum + regnum,
raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 16, 16);
+ result_value->mark_bytes_unavailable (16, 16);
else
memcpy (buf + 16, raw_buf, 16);
}
/* Extract (always little endian). */
status = regcache->raw_read (gpnum, raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0,
- result_value->type ()->length ());
+ result_value->mark_bytes_unavailable (0,
+ result_value->type ()->length ());
else
memcpy (buf, raw_buf, 2);
}
upper registers. */
status = regcache->raw_read (gpnum % 4, raw_buf);
if (status != REG_VALID)
- mark_value_bytes_unavailable (result_value, 0,
- result_value->type ()->length ());
+ result_value->mark_bytes_unavailable (0,
+ result_value->type ()->length ());
else if (gpnum >= 4)
memcpy (buf, raw_buf + 1, 1);
else
regnum = (fpreg + 8 - top) % 8 + I387_ST0_REGNUM (tdep);
regval = get_frame_register_value (frame, regnum);
- if (value_entirely_available (regval))
+ if (regval->entirely_available ())
{
const gdb_byte *raw = regval->contents ().data ();
format_stream.puts (name);
pad_to_column (format_stream, value_column_1);
- print_raw_format = (value_entirely_available (val)
- && !value_optimized_out (val));
+ print_raw_format = (val->entirely_available ()
+ && !val->optimized_out ());
/* If virtual format is floating, print it that way, and in raw
hex. */
b, VAR_DOMAIN);
value = read_var_value (vsym.symbol, vsym.block, frame);
/* If the value was optimized out, revert to the old behavior. */
- if (! value_optimized_out (value))
+ if (! value->optimized_out ())
{
handler = value_as_address (value);
&& (arg->val || arg->error)));
if (skip_unavailable && arg->val != NULL
- && (value_entirely_unavailable (arg->val)
+ && (arg->val->entirely_unavailable ()
/* A scalar object that does not have all bits available is
also considered unavailable, because all bits contribute
to its representation. */
|| (val_print_scalar_type_p (arg->val->type ())
- && !value_bytes_available (arg->val,
- arg->val->embedded_offset (),
- arg->val->type ()->length ()))))
+ && !arg->val->bytes_available (arg->val->embedded_offset (),
+ arg->val->type ()->length ()))))
return;
gdb::optional<ui_out_emit_tuple> tuple_emitter;
struct value *val = value_of_register (regnum, frame);
struct value_print_options opts;
- if (skip_unavailable && !value_entirely_available (val))
+ if (skip_unavailable && !val->entirely_available ())
return;
ui_out_emit_tuple tuple_emitter (uiout, NULL);
/* OK: get the data in raw format. */
value = get_frame_register_value (frame, regnum);
- if (value_optimized_out (value)
- || !value_entirely_available (value))
+ if (value->optimized_out ()
+ || !value->entirely_available ())
{
gdb_printf (file, "%*s ",
(int) mips_abi_regsize (gdbarch) * 2,
struct type *type = val->type ();
struct value_print_options opts;
- if (value_entirely_optimized_out (val))
+ if (val->entirely_optimized_out ())
{
val_print_optimized_out (val, stream);
return;
/* OK, we have an address value. Check we have a complete value we
can extract. */
- if (value_optimized_out (value)
- || !value_entirely_available (value))
+ if (value->optimized_out ()
+ || !value->entirely_available ())
return false;
/* We do. Check whether it includes any tags. */
value->fetch_lazy ();
/* No pretty-printer support for unavailable values. */
- if (!value_bytes_available (value, 0, type->length ()))
+ if (!value->bytes_available (0, type->length ()))
return EXT_LANG_RC_NOP;
if (!gdb_python_initialized)
try
{
- opt = value_optimized_out (value);
+ opt = value->optimized_out ();
}
catch (const gdb_exception &except)
{
computed = gdbarch_pseudo_register_read_value (m_descr->gdbarch,
this, regnum);
- if (value_entirely_available (computed))
+ if (computed->entirely_available ())
memcpy (buf, computed->contents_raw ().data (),
m_descr->sizeof_register[regnum]);
else
API is preferred. */
if (cooked_read (regnum,
result->contents_raw ().data ()) == REG_UNAVAILABLE)
- mark_value_bytes_unavailable (result, 0,
- result->type ()->length ());
+ result->mark_bytes_unavailable (0,
+ result->type ()->length ());
return result;
}
return;
}
- print_raw_format = (value_entirely_available (val)
- && !value_optimized_out (val));
+ print_raw_format = (val->entirely_available ()
+ && !val->optimized_out ());
if (regtype->code () == TYPE_CODE_FLT
|| (regtype->code () == TYPE_CODE_UNION
struct value *val;
val = frame_unwind_register_value (this_frame, S390_PSWA_REGNUM);
- if (!value_optimized_out (val))
+ if (!val->optimized_out ())
{
LONGEST pswa = value_as_long (val);
struct value *val;
val = frame_unwind_register_value (this_frame, S390_PSWM_REGNUM);
- if (!value_optimized_out (val))
+ if (!val->optimized_out ())
{
LONGEST pswm = value_as_long (val);
struct value *val;
val = frame_unwind_register_value (this_frame, S390_R0_REGNUM + reg);
- if (!value_optimized_out (val))
+ if (!val->optimized_out ())
return value_cast (type, val);
}
&& SYMBOL_COMPUTED_OPS (sym)->read_variable_at_entry != NULL
&& fp_opts.print_entry_values != print_entry_values_no
&& (fp_opts.print_entry_values != print_entry_values_if_needed
- || !val || value_optimized_out (val)))
+ || !val || val->optimized_out ()))
{
try
{
}
}
- if (entryval != NULL && value_optimized_out (entryval))
+ if (entryval != NULL && entryval->optimized_out ())
entryval = NULL;
if (fp_opts.print_entry_values == print_entry_values_compact
if (fp_opts.print_entry_values == print_entry_values_only
|| fp_opts.print_entry_values == print_entry_values_both
|| (fp_opts.print_entry_values == print_entry_values_preferred
- && (!val || value_optimized_out (val))))
+ && (!val || val->optimized_out ())))
{
entryval = value::allocate_optimized_out (sym->type ());
entryval_error = NULL;
if ((fp_opts.print_entry_values == print_entry_values_compact
|| fp_opts.print_entry_values == print_entry_values_if_needed
|| fp_opts.print_entry_values == print_entry_values_preferred)
- && (!val || value_optimized_out (val)) && entryval != NULL)
+ && (!val || val->optimized_out ()) && entryval != NULL)
{
val = NULL;
val_error = NULL;
struct value *value = frame_unwind_register_value (fi, sp_regnum);
gdb_assert (value != NULL);
- if (!value_optimized_out (value) && value_entirely_available (value))
+ if (!value->optimized_out () && value->entirely_available ())
{
if (VALUE_LVAL (value) == not_lval)
{
struct type *elt_type = check_typedef (tarray->target_type ());
LONGEST elt_size = type_length_units (elt_type);
if (!array->lazy ()
- && !value_bytes_available (array, elt_size * index, elt_size))
+ && !array->bytes_available (elt_size * index, elt_size))
{
struct value *val = value::allocate (elt_type);
- mark_value_bytes_unavailable (val, 0, elt_size);
+ val->mark_bytes_unavailable (0, elt_size);
VALUE_LVAL (val) = lval_memory;
val->set_address (array->address () + elt_size * index);
return val;
if (status == TARGET_XFER_OK)
/* nothing */;
else if (status == TARGET_XFER_UNAVAILABLE)
- mark_value_bits_unavailable (val, (xfered_total * HOST_CHAR_BIT
- + bit_offset),
- xfered_partial * HOST_CHAR_BIT);
+ val->mark_bits_unavailable ((xfered_total * HOST_CHAR_BIT
+ + bit_offset),
+ xfered_partial * HOST_CHAR_BIT);
else if (status == TARGET_XFER_EOF)
memory_error (TARGET_XFER_E_IO, memaddr + xfered_total);
else
&& type->code () != TYPE_CODE_STRUCT
&& type->code () != TYPE_CODE_ARRAY)
{
- if (value_bits_any_optimized_out (val,
- TARGET_CHAR_BIT * embedded_offset,
- TARGET_CHAR_BIT * type->length ()))
+ if (val->bits_any_optimized_out (TARGET_CHAR_BIT * embedded_offset,
+ TARGET_CHAR_BIT * type->length ()))
{
val_print_optimized_out (val, stream);
return 0;
return is_ref;
}
- if (!value_bytes_available (val, embedded_offset, type->length ()))
+ if (!val->bytes_available (embedded_offset, type->length ()))
{
val_print_unavailable (stream);
return 0;
return 0;
}
- if (value_entirely_optimized_out (val))
+ if (val->entirely_optimized_out ())
{
if (options->summary && !val_print_scalar_type_p (val->type ()))
gdb_printf (stream, "...");
return 0;
}
- if (value_entirely_unavailable (val))
+ if (val->entirely_unavailable ())
{
if (options->summary && !val_print_scalar_type_p (val->type ()))
gdb_printf (stream, "...");
/* A scalar object that does not have all bits available can't be
printed, because all bits contribute to its representation. */
- if (value_bits_any_optimized_out (val, 0,
- TARGET_CHAR_BIT * type->length ()))
+ if (val->bits_any_optimized_out (0,
+ TARGET_CHAR_BIT * type->length ()))
val_print_optimized_out (val, stream);
- else if (!value_bytes_available (val, 0, type->length ()))
+ else if (!val->bytes_available (0, type->length ()))
val_print_unavailable (stream);
else
print_scalar_formatted (valaddr, type, options, size, stream);
UINT_MAX (unlimited). */
if (options->repeat_count_threshold < UINT_MAX)
{
- bool unavailable = value_entirely_unavailable (element);
- bool available = value_entirely_available (element);
+ bool unavailable = element->entirely_unavailable ();
+ bool available = element->entirely_available ();
while (rep1 < len)
{
rep1 * bit_stride,
bit_stride);
bool repeated = ((available
- && value_entirely_available (rep_elt)
+ && rep_elt->entirely_available ()
&& element->contents_eq (rep_elt))
|| (unavailable
- && value_entirely_unavailable (rep_elt)));
+ && rep_elt->entirely_unavailable ()));
if (!repeated)
break;
++reps;
}
int
-value_bits_available (const struct value *value,
- LONGEST offset, ULONGEST length)
+value::bits_available (LONGEST offset, ULONGEST length) const
{
- gdb_assert (!value->m_lazy);
+ gdb_assert (!m_lazy);
/* Don't pretend we have anything available there in the history beyond
the boundaries of the value recorded. It's not like inferior memory
where there is actual stuff underneath. */
- ULONGEST val_len = TARGET_CHAR_BIT * value->enclosing_type ()->length ();
- return !((value->m_in_history
+ ULONGEST val_len = TARGET_CHAR_BIT * enclosing_type ()->length ();
+ return !((m_in_history
&& (offset < 0 || offset + length > val_len))
- || ranges_contain (value->m_unavailable, offset, length));
+ || ranges_contain (m_unavailable, offset, length));
}
int
-value_bytes_available (const struct value *value,
- LONGEST offset, ULONGEST length)
+value::bytes_available (LONGEST offset, ULONGEST length) const
{
ULONGEST sign = (1ULL << (sizeof (ULONGEST) * 8 - 1)) / TARGET_CHAR_BIT;
ULONGEST mask = (sign << 1) - 1;
|| (length > 0 && (~offset & (offset + length - 1) & sign) != 0))
error (_("Integer overflow in data location calculation"));
- return value_bits_available (value,
- offset * TARGET_CHAR_BIT,
- length * TARGET_CHAR_BIT);
+ return bits_available (offset * TARGET_CHAR_BIT, length * TARGET_CHAR_BIT);
}
int
-value_bits_any_optimized_out (const struct value *value, int bit_offset, int bit_length)
+value::bits_any_optimized_out (int bit_offset, int bit_length) const
{
- gdb_assert (!value->m_lazy);
+ gdb_assert (!m_lazy);
- return ranges_contain (value->m_optimized_out, bit_offset, bit_length);
+ return ranges_contain (m_optimized_out, bit_offset, bit_length);
}
int
-value_entirely_available (struct value *value)
+value::entirely_available ()
{
/* We can only tell whether the whole value is available when we try
to read it. */
- if (value->m_lazy)
- value->fetch_lazy ();
+ if (m_lazy)
+ fetch_lazy ();
- if (value->m_unavailable.empty ())
+ if (m_unavailable.empty ())
return 1;
return 0;
}
-/* Returns true if VALUE is entirely covered by RANGES. If the value
- is lazy, it'll be read now. Note that RANGE is a pointer to
- pointer because reading the value might change *RANGE. */
+/* See value.h. */
-static int
-value_entirely_covered_by_range_vector (struct value *value,
- const std::vector<range> &ranges)
+int
+value::entirely_covered_by_range_vector (const std::vector<range> &ranges)
{
/* We can only tell whether the whole value is optimized out /
unavailable when we try to read it. */
- if (value->m_lazy)
- value->fetch_lazy ();
+ if (m_lazy)
+ fetch_lazy ();
if (ranges.size () == 1)
{
const struct range &t = ranges[0];
if (t.offset == 0
- && t.length == (TARGET_CHAR_BIT
- * value->enclosing_type ()->length ()))
+ && t.length == TARGET_CHAR_BIT * enclosing_type ()->length ())
return 1;
}
return 0;
}
-int
-value_entirely_unavailable (struct value *value)
-{
- return value_entirely_covered_by_range_vector (value, value->m_unavailable);
-}
-
-int
-value_entirely_optimized_out (struct value *value)
-{
- return value_entirely_covered_by_range_vector (value, value->m_optimized_out);
-}
-
/* Insert into the vector pointed to by VECTORP the bit range starting of
OFFSET bits, and extending for the next LENGTH bits. */
}
void
-mark_value_bits_unavailable (struct value *value,
- LONGEST offset, ULONGEST length)
+value::mark_bits_unavailable (LONGEST offset, ULONGEST length)
{
- insert_into_bit_range_vector (&value->m_unavailable, offset, length);
+ insert_into_bit_range_vector (&m_unavailable, offset, length);
}
void
-mark_value_bytes_unavailable (struct value *value,
- LONGEST offset, ULONGEST length)
+value::mark_bytes_unavailable (LONGEST offset, ULONGEST length)
{
- mark_value_bits_unavailable (value,
- offset * TARGET_CHAR_BIT,
- length * TARGET_CHAR_BIT);
+ mark_bits_unavailable (offset * TARGET_CHAR_BIT,
+ length * TARGET_CHAR_BIT);
}
/* Find the first range in RANGES that overlaps the range defined by
{
struct value *retval = value::allocate_lazy (type);
- mark_value_bytes_optimized_out (retval, 0, type->length ());
+ retval->mark_bytes_optimized_out (0, type->length ());
retval->set_lazy (0);
return retval;
}
if (result->is_pointer_or_reference ()
&& (check_typedef (result->target_type ())->code ()
== TYPE_CODE_STRUCT)
- && !value_optimized_out (value))
+ && !value->optimized_out ())
{
struct type *real_type;
/* The overwritten DST range gets unavailability ORed in, not
replaced. Make sure to remember to implement replacing if it
turns out actually necessary. */
- gdb_assert (value_bytes_available (dst, dst_offset, length));
- gdb_assert (!value_bits_any_optimized_out (dst,
- TARGET_CHAR_BIT * dst_offset,
- TARGET_CHAR_BIT * length));
+ gdb_assert (dst->bytes_available (dst_offset, length));
+ gdb_assert (!dst->bits_any_optimized_out (TARGET_CHAR_BIT * dst_offset,
+ TARGET_CHAR_BIT * length));
/* Copy the data. */
gdb::array_view<gdb_byte> dst_contents
turns out actually necessary. */
LONGEST dst_offset = dst_bit_offset / TARGET_CHAR_BIT;
LONGEST length = bit_length / TARGET_CHAR_BIT;
- gdb_assert (value_bytes_available (dst, dst_offset, length));
- gdb_assert (!value_bits_any_optimized_out (dst, dst_bit_offset,
- bit_length));
+ gdb_assert (dst->bytes_available (dst_offset, length));
+ gdb_assert (!dst->bits_any_optimized_out (dst_bit_offset,
+ bit_length));
/* Copy the data. */
gdb::array_view<gdb_byte> dst_contents = dst->contents_all_raw ();
}
int
-value_optimized_out (struct value *value)
+value::optimized_out ()
{
- if (value->m_lazy)
+ if (m_lazy)
{
/* See if we can compute the result without fetching the
value. */
- if (VALUE_LVAL (value) == lval_memory)
+ if (VALUE_LVAL (this) == lval_memory)
return false;
- else if (VALUE_LVAL (value) == lval_computed)
+ else if (VALUE_LVAL (this) == lval_computed)
{
- const struct lval_funcs *funcs = value->m_location.computed.funcs;
+ const struct lval_funcs *funcs = m_location.computed.funcs;
if (funcs->is_optimized_out != nullptr)
- return funcs->is_optimized_out (value);
+ return funcs->is_optimized_out (this);
}
/* Fall back to fetching. */
try
{
- value->fetch_lazy ();
+ fetch_lazy ();
}
catch (const gdb_exception_error &ex)
{
}
}
- return !value->m_optimized_out.empty ();
+ return !m_optimized_out.empty ();
}
/* Mark contents of VALUE as optimized out, starting at OFFSET bytes, and
the following LENGTH bytes. */
void
-mark_value_bytes_optimized_out (struct value *value, int offset, int length)
+value::mark_bytes_optimized_out (int offset, int length)
{
- mark_value_bits_optimized_out (value,
- offset * TARGET_CHAR_BIT,
- length * TARGET_CHAR_BIT);
+ mark_bits_optimized_out (offset * TARGET_CHAR_BIT,
+ length * TARGET_CHAR_BIT);
}
/* See value.h. */
void
-mark_value_bits_optimized_out (struct value *value,
- LONGEST offset, LONGEST length)
+value::mark_bits_optimized_out (LONGEST offset, LONGEST length)
{
- insert_into_bit_range_vector (&value->m_optimized_out, offset, length);
+ insert_into_bit_range_vector (&m_optimized_out, offset, length);
}
int
val->m_limited_length = m_limited_length;
if (!val->lazy ()
- && !(value_entirely_optimized_out (val)
- || value_entirely_unavailable (val)))
+ && !(val->entirely_optimized_out ()
+ || val->entirely_unavailable ()))
{
ULONGEST length = val->m_limited_length;
if (length == 0)
ULONGEST limit = val->m_limited_length;
if (limit != 0)
- mark_value_bytes_unavailable (val, limit,
- enclosing_type->length () - limit);
+ val->mark_bytes_unavailable (limit,
+ enclosing_type->length () - limit);
/* Mark the value as recorded in the history for the availability check. */
val->m_in_history = true;
gdb_assert (val != NULL);
bit_offset = embedded_offset * TARGET_CHAR_BIT + bitpos;
- if (value_bits_any_optimized_out (val, bit_offset, bitsize)
- || !value_bits_available (val, bit_offset, bitsize))
+ if (val->bits_any_optimized_out (bit_offset, bitsize)
+ || !val->bits_available (bit_offset, bitsize))
return 0;
*result = unpack_bits_as_long (field_type, valaddr + embedded_offset,
user_reg_map_regnum_to_name (gdbarch, regnum));
gdb_printf (&debug_file, "->");
- if (value_optimized_out (new_val))
+ if (new_val->optimized_out ())
{
gdb_printf (&debug_file, " ");
val_print_optimized_out (new_val, &debug_file);
value_ref_ptr val = release_value (value::allocate_optimized_out (type));
value_ref_ptr copy = release_value (val.get ()->copy ());
- SELF_CHECK (value_entirely_optimized_out (val.get ()));
- SELF_CHECK (value_entirely_optimized_out (copy.get ()));
+ SELF_CHECK (val.get ()->entirely_optimized_out ());
+ SELF_CHECK (copy.get ()->entirely_optimized_out ());
}
} /* namespace selftests */
void set_lazy (int val)
{ m_lazy = val; }
-
/* If a value represents a C++ object, then the `type' field gives the
object's compile-time type. If the object actually belongs to some
class derived from `type', perhaps with other base classes and
drops to 0, it will be freed. */
void decref ();
+  /* Determine whether this value's contents bytes starting at
+     OFFSET and extending for LENGTH bytes are available.  This returns
+     nonzero if all bytes in the given range are available, zero if any
+     byte is unavailable.  */
+ int bytes_available (LONGEST offset, ULONGEST length) const;
+
+  /* Determine whether this value's contents bits starting at
+     OFFSET and extending for LENGTH bits are available.  This returns
+     nonzero if all bits in the given range are available, zero if any
+     bit is unavailable.  */
+ int bits_available (LONGEST offset, ULONGEST length) const;
+
+ /* Like bytes_available, but return false if any byte in the
+ whole object is unavailable. */
+ int entirely_available ();
+
+ /* Like entirely_available, but return false if any byte in the
+ whole object is available. */
+ int entirely_unavailable ()
+ { return entirely_covered_by_range_vector (m_unavailable); }
+
+ /* Mark this value's content bytes starting at OFFSET and extending
+ for LENGTH bytes as unavailable. */
+ void mark_bytes_unavailable (LONGEST offset, ULONGEST length);
+
+ /* Mark this value's content bits starting at OFFSET and extending
+ for LENGTH bits as unavailable. */
+ void mark_bits_unavailable (LONGEST offset, ULONGEST length);
+
+  /* Return nonzero if this is, at least partially, the value of a
+     variable which does not actually exist in the program.  If the
+     value is lazy, this may fetch it now.  */
+ int optimized_out ();
+
+  /* Return true if any of this value's contents bits starting at
+     OFFSET and extending for LENGTH bits is optimized out, false
+     otherwise.  */
+ int bits_any_optimized_out (int bit_offset, int bit_length) const;
+
+ /* Like optimized_out, but return true iff the whole value is
+ optimized out. */
+ int entirely_optimized_out ()
+ {
+ return entirely_covered_by_range_vector (m_optimized_out);
+ }
+
+ /* Mark this value's content bytes starting at OFFSET and extending
+ for LENGTH bytes as optimized out. */
+ void mark_bytes_optimized_out (int offset, int length);
+
+ /* Mark this value's content bits starting at OFFSET and extending
+ for LENGTH bits as optimized out. */
+ void mark_bits_optimized_out (LONGEST offset, LONGEST length);
+
/* Type of value; either not an lval, or one of the various
different possible kinds of lval. */
void require_not_optimized_out () const;
void require_available () const;
+
+  /* Returns true if this value is entirely covered by RANGES.  If the
+     value is lazy, it'll be read now.  Note that RANGES is taken by
+     reference because reading the value may modify the referenced
+     vector (e.g. when RANGES is this value's own unavailable list).  */
+ int entirely_covered_by_range_vector (const std::vector<range> &ranges);
};
inline void
extern void error_value_optimized_out (void);
-/* If nonzero, this is the value of a variable which does not actually
- exist in the program, at least partially. If the value is lazy,
- this may fetch it now. */
-extern int value_optimized_out (struct value *value);
-
-/* Given a value, return true if any of the contents bits starting at
- OFFSET and extending for LENGTH bits is optimized out, false
- otherwise. */
-
-extern int value_bits_any_optimized_out (const struct value *value,
- int bit_offset, int bit_length);
-
-/* Like value_optimized_out, but return true iff the whole value is
- optimized out. */
-extern int value_entirely_optimized_out (struct value *value);
-
-/* Mark VALUE's content bytes starting at OFFSET and extending for
- LENGTH bytes as optimized out. */
-
-extern void mark_value_bytes_optimized_out (struct value *value,
- int offset, int length);
-
-/* Mark VALUE's content bits starting at OFFSET and extending for
- LENGTH bits as optimized out. */
-
-extern void mark_value_bits_optimized_out (struct value *value,
- LONGEST offset, LONGEST length);
-
/* Set COMPONENT's location as appropriate for a component of WHOLE
--- regardless of what kind of lvalue WHOLE is. */
extern void set_value_component_location (struct value *component,
extern struct value *coerce_array (struct value *value);
-/* Given a value, determine whether the contents bytes starting at
- OFFSET and extending for LENGTH bytes are available. This returns
- nonzero if all bytes in the given range are available, zero if any
- byte is unavailable. */
-
-extern int value_bytes_available (const struct value *value,
- LONGEST offset, ULONGEST length);
-
-/* Given a value, determine whether the contents bits starting at
- OFFSET and extending for LENGTH bits are available. This returns
- nonzero if all bits in the given range are available, zero if any
- bit is unavailable. */
-
-extern int value_bits_available (const struct value *value,
- LONGEST offset, ULONGEST length);
-
-/* Like value_bytes_available, but return false if any byte in the
- whole object is unavailable. */
-extern int value_entirely_available (struct value *value);
-
-/* Like value_entirely_available, but return false if any byte in the
- whole object is available. */
-extern int value_entirely_unavailable (struct value *value);
-
-/* Mark VALUE's content bytes starting at OFFSET and extending for
- LENGTH bytes as unavailable. */
-
-extern void mark_value_bytes_unavailable (struct value *value,
- LONGEST offset, ULONGEST length);
-
-/* Mark VALUE's content bits starting at OFFSET and extending for
- LENGTH bits as unavailable. */
-
-extern void mark_value_bits_unavailable (struct value *value,
- LONGEST offset, ULONGEST length);
-
/* Read LENGTH addressable memory units starting at MEMADDR into BUFFER,
which is (or will be copied to) VAL's contents buffer offset by
BIT_OFFSET bits. Marks value contents ranges as unavailable if