use crate::{
error::{Error, Result},
interned::{GlobalState, Intern, InternTarget, Interned},
- loc::{BaseTy, Loc, LocFields, LocKind, Ty, TyFields},
+ loc::{Loc, LocFields, LocKind, Ty},
};
use enum_map::{enum_map, EnumMap};
use num_bigint::BigUint;
use num_traits::Zero;
+use once_cell::race::OnceBox;
use serde::{Deserialize, Serialize};
use std::{
- borrow::{Borrow, Cow},
+ borrow::Borrow,
cell::Cell,
- collections::BTreeMap,
+ collections::{
+ btree_map::{self, Entry},
+ BTreeMap,
+ },
fmt,
hash::Hash,
- iter::{FusedIterator, Peekable},
+ iter::FusedIterator,
+ mem,
num::NonZeroU32,
- ops::{
- BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, ControlFlow, Range, Sub,
- SubAssign,
- },
+ ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Range, Sub, SubAssign},
};
+#[inline]
+fn zero_biguint<'a>() -> &'a BigUint {
+ static ZERO: OnceBox<BigUint> = OnceBox::new();
+ ZERO.get_or_init(
+ #[cold]
+ || BigUint::zero().into(),
+ )
+}
+
#[derive(Deserialize)]
struct LocSetSerialized {
- reg_len_to_starts_map: BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>>,
+ starts_map: BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>>,
}
impl TryFrom<LocSetSerialized> for LocSet {
type Error = Error;
fn try_from(value: LocSetSerialized) -> Result<Self, Self::Error> {
- Self::from_reg_len_to_starts_map(value.reg_len_to_starts_map)
+ Self::from_starts_map(value.starts_map)
}
}
#[derive(Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(try_from = "LocSetSerialized")]
pub struct LocSet {
- reg_len_to_starts_map: BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>>,
+ starts_map: BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>>,
+}
+
+impl<'a> arbitrary::Arbitrary<'a> for LocSet {
+ fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result<Self> {
+ u.arbitrary_iter()?.collect()
+ }
}
/// computes same value as `a & !b`, but more efficiently
impl LocSet {
pub fn arbitrary_with_ty(
- ty: Option<Ty>,
+ ty: Ty,
u: &mut arbitrary::Unstructured<'_>,
) -> arbitrary::Result<Self> {
- let Some(ty) = ty else {
- return Ok(Self::new());
- };
let kinds = ty.base_ty.loc_kinds();
type Mask = u128;
let kinds: Vec<_> = if kinds.len() > Mask::BITS as usize {
let byte_count = (bit_count + u8::BITS - 1) / u8::BITS;
let bytes = u.bytes(byte_count as usize)?;
starts[kind] = BigUint::from_bytes_le(bytes);
+ starts[kind] &= (BigUint::from(1u8) << bit_count) - 1u8;
all_zero &= starts[kind].is_zero();
}
if all_zero {
Ok(Loc::arbitrary_with_ty(ty, u)?.into())
} else {
- Ok(Self::from_parts(starts, Some(ty))?)
+ Ok(Self::from_starts_map_iter_unchecked([(ty.reg_len, starts)]))
}
}
- pub fn starts(&self) -> &EnumMap<LocKind, BigUint> {
- &self.starts
- }
- pub fn stops(&self) -> EnumMap<LocKind, BigUint> {
- let Some(ty) = self.ty else {
- return EnumMap::default();
- };
- enum_map! {kind => &self.starts[kind] << ty.reg_len.get()}
+ pub fn starts(&self, reg_len: NonZeroU32, kind: LocKind) -> &BigUint {
+ self.starts_map
+ .get(®_len)
+ .map(|v| &v[kind])
+ .unwrap_or_else(zero_biguint)
}
- pub fn ty(&self) -> Option<Ty> {
- self.ty
+ pub fn stops(&self, reg_len: NonZeroU32, kind: LocKind) -> BigUint {
+ self.starts(reg_len, kind) << reg_len.get()
}
- pub fn kinds(&self) -> impl Iterator<Item = LocKind> + '_ {
- self.starts
- .iter()
- .filter_map(|(kind, starts)| if starts.is_zero() { None } else { Some(kind) })
+ pub fn starts_map(&self) -> &BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>> {
+ &self.starts_map
}
- pub fn reg_len(&self) -> Option<NonZeroU32> {
- self.ty.map(|v| v.reg_len)
+ pub const fn new() -> Self {
+ Self {
+ starts_map: BTreeMap::new(),
+ }
}
- pub fn base_ty(&self) -> Option<BaseTy> {
- self.ty.map(|v| v.base_ty)
+ /// filters out empty entries, but doesn't do any other checks
+ fn from_starts_map_iter_unchecked(
+ starts_map: impl IntoIterator<Item = (NonZeroU32, EnumMap<LocKind, BigUint>)>,
+ ) -> Self {
+ Self {
+ starts_map: starts_map
+ .into_iter()
+ .filter(|(_, starts)| !starts.iter().all(|(_, starts)| starts.is_zero()))
+ .collect(),
+ }
}
- pub fn new() -> Self {
- Self::default()
+ fn for_each_reg_len_filtering_out_empty_entries(
+ &mut self,
+ mut f: impl FnMut(NonZeroU32, &mut EnumMap<LocKind, BigUint>),
+ ) {
+ self.starts_map.retain(|®_len, starts| {
+ f(reg_len, starts);
+ !Self::is_entry_empty(starts)
+ });
}
- pub fn from_parts(starts: EnumMap<LocKind, BigUint>, ty: Option<Ty>) -> Result<Self> {
- let mut empty = true;
- for (kind, starts) in &starts {
- if !starts.is_zero() {
- empty = false;
- let expected_ty = Ty::new_or_scalar(TyFields {
- base_ty: kind.base_ty(),
- reg_len: ty.map(|v| v.reg_len).unwrap_or(nzu32_lit!(1)),
- });
- if ty != Some(expected_ty) {
- return Err(Error::TyMismatch {
- ty,
- expected_ty: Some(expected_ty),
- });
+ /// helper for binary operations that keeps Locs not present in rhs
+ fn bin_op_keep_helper(
+ &mut self,
+ rhs: &Self,
+ mut f: impl FnMut(NonZeroU32, &mut EnumMap<LocKind, BigUint>, &EnumMap<LocKind, BigUint>),
+ ) {
+ rhs.starts_map.iter().for_each(|(®_len, rhs_starts)| {
+ match self.starts_map.entry(reg_len) {
+ Entry::Vacant(entry) => {
+ let mut lhs_starts = EnumMap::default();
+ f(reg_len, &mut lhs_starts, rhs_starts);
+ if !Self::is_entry_empty(&lhs_starts) {
+ entry.insert(lhs_starts);
+ }
}
- // bits() is one past max bit set, so use >= rather than >
- if starts.bits() >= Loc::max_start(kind, expected_ty.reg_len)? as u64 {
- return Err(Error::StartNotInValidRange);
+ Entry::Occupied(mut entry) => {
+ f(reg_len, entry.get_mut(), rhs_starts);
+ if Self::is_entry_empty(entry.get()) {
+ entry.remove();
+ }
}
}
- }
- if empty && ty.is_some() {
- Err(Error::TyMismatch {
- ty,
- expected_ty: None,
- })
- } else {
- Ok(Self { starts, ty })
- }
+ });
+ }
+ fn is_entry_empty(starts: &EnumMap<LocKind, BigUint>) -> bool {
+ starts.iter().all(|(_, starts)| starts.is_zero())
+ }
+ pub fn from_starts_map(
+ mut starts_map: BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>>,
+ ) -> Result<Self> {
+ let mut error = Ok(());
+ starts_map.retain(|®_len, starts| {
+ if error.is_err() {
+ return false;
+ }
+ let mut any_locs = false;
+ for (kind, starts) in starts {
+ if !starts.is_zero() {
+ any_locs = true;
+ error = (|| {
+ // bits() is one past max bit set, so use >= rather than >
+ if starts.bits() >= Loc::max_start(kind, reg_len)? as u64 {
+ return Err(Error::StartNotInValidRange);
+ }
+ Ok(())
+ })();
+ if error.is_err() {
+ return false;
+ }
+ }
+ }
+ any_locs
+ });
+ Ok(Self { starts_map })
}
pub fn clear(&mut self) {
- for v in self.starts.values_mut() {
- v.assign_from_slice(&[]);
- }
+ self.starts_map.clear();
}
pub fn contains_exact(&self, value: Loc) -> bool {
- Some(value.ty()) == self.ty && self.starts[value.kind].bit(value.start as _)
- }
- pub fn try_insert(&mut self, value: Loc) -> Result<bool> {
- if self.is_empty() {
- self.ty = Some(value.ty());
- self.starts[value.kind].set_bit(value.start as u64, true);
- return Ok(true);
- };
- let ty = Some(value.ty());
- if ty != self.ty {
- return Err(Error::TyMismatch {
- ty,
- expected_ty: self.ty,
- });
- }
- let retval = !self.starts[value.kind].bit(value.start as u64);
- self.starts[value.kind].set_bit(value.start as u64, true);
- Ok(retval)
+ self.starts(value.reg_len, value.kind).bit(value.start as _)
}
pub fn insert(&mut self, value: Loc) -> bool {
- self.try_insert(value).unwrap()
+ let starts = match self.starts_map.entry(value.reg_len) {
+ Entry::Occupied(entry) => entry.into_mut(),
+ Entry::Vacant(entry) => {
+ entry.insert(Default::default())[value.kind].set_bit(value.start as u64, true);
+ return true;
+ }
+ };
+ let starts = &mut starts[value.kind];
+ let retval = !starts.bit(value.start as u64);
+ starts.set_bit(value.start as u64, true);
+ retval
}
pub fn remove(&mut self, value: Loc) -> bool {
- if self.contains_exact(value) {
- self.starts[value.kind].set_bit(value.start as u64, false);
- if self.starts.values().all(BigUint::is_zero) {
- self.ty = None;
+ let Entry::Occupied(mut entry) = self.starts_map.entry(value.reg_len) else {
+ return false;
+ };
+ let starts = entry.get_mut();
+ if starts[value.kind].bit(value.start as u64) {
+ starts[value.kind].set_bit(value.start as u64, false);
+ if starts.values().all(BigUint::is_zero) {
+ entry.remove();
}
true
} else {
}
}
pub fn is_empty(&self) -> bool {
- self.ty.is_none()
+ self.starts_map.is_empty()
}
pub fn iter(&self) -> Iter<'_> {
- if let Some(ty) = self.ty {
- let mut starts = self.starts.iter().peekable();
- Iter {
- internals: Some(IterInternals {
- ty,
- start_range: get_start_range(starts.peek()),
- starts,
- }),
- }
- } else {
- Iter { internals: None }
+ Iter {
+ internals: IterInternals::new(self.starts_map.iter()),
}
}
pub fn len(&self) -> usize {
- let retval: u64 = self.starts.values().map(BigUint::count_ones).sum();
+ let retval: u64 = self
+ .starts_map
+ .values()
+ .map(|starts| starts.values().map(BigUint::count_ones).sum::<u64>())
+ .sum();
retval as usize
}
+ /// computes `self = &other - &self`
+ pub fn sub_reverse_assign(&mut self, other: impl Borrow<Self>) {
+ // TODO: make more efficient
+ let other: &Self = other.borrow();
+ *self = other - &*self;
+ }
}
#[derive(Clone, Debug)]
-struct IterInternals<I, T>
+struct IterInternalsRest<StartsMapValueIter, Starts>
where
- I: Iterator<Item = (LocKind, T)>,
- T: Clone + Borrow<BigUint>,
+ StartsMapValueIter: Iterator<Item = (LocKind, Starts)>,
+ Starts: Borrow<BigUint>,
{
- ty: Ty,
- starts: Peekable<I>,
+ reg_len: NonZeroU32,
+ starts_map_value_iter: StartsMapValueIter,
+ kind: LocKind,
+ starts: Starts,
start_range: Range<u32>,
}
-impl<I, T> IterInternals<I, T>
+impl<StartsMapValueIter, Starts> IterInternalsRest<StartsMapValueIter, Starts>
where
- I: Iterator<Item = (LocKind, T)>,
- T: Clone + Borrow<BigUint>,
+ StartsMapValueIter: Iterator<Item = (LocKind, Starts)>,
+ Starts: Borrow<BigUint>,
{
- fn next(&mut self) -> Option<Loc> {
- let IterInternals {
- ty,
- ref mut starts,
- ref mut start_range,
- } = *self;
+ fn new(reg_len: NonZeroU32, mut starts_map_value_iter: StartsMapValueIter) -> Option<Self> {
loop {
- let (kind, ref v) = *starts.peek()?;
+ let (kind, starts) = starts_map_value_iter.next()?;
+ let starts_ref: &BigUint = starts.borrow();
+ let Some(start) = starts_ref.trailing_zeros() else {
+ continue;
+ };
+ let start = start.try_into().expect("checked by LocSet constructors");
+ let end = starts_ref
+ .bits()
+ .try_into()
+ .expect("checked by LocSet constructors");
+ return Some(Self {
+ reg_len,
+ starts_map_value_iter,
+ kind,
+ starts,
+ start_range: start..end,
+ });
+ }
+ }
+ fn next(this: &mut Option<Self>) -> Option<Loc> {
+ while let Some(Self {
+ reg_len,
+ starts_map_value_iter: _,
+ kind,
+ ref starts,
+ ref mut start_range,
+ }) = *this
+ {
let Some(start) = start_range.next() else {
- starts.next();
- *start_range = get_start_range(starts.peek());
+ *this = Self::new(reg_len, this.take().expect("known to be Some").starts_map_value_iter);
continue;
};
- if v.borrow().bit(start as u64) {
+ if starts.borrow().bit(start as u64) {
return Some(
Loc::new(LocFields {
kind,
start,
- reg_len: ty.reg_len,
+ reg_len,
})
.expect("known to be valid"),
);
}
}
+ None
}
}
-fn get_start_range(v: Option<&(LocKind, impl Borrow<BigUint>)>) -> Range<u32> {
- 0..v.map(|(_, v)| v.borrow().bits() as u32).unwrap_or(0)
+#[derive(Clone, Debug)]
+struct IterInternals<StartsMapIter, RegLen, StartsMapValue, StartsMapValueIter, Starts>
+where
+ StartsMapIter: Iterator<Item = (RegLen, StartsMapValue)>,
+ RegLen: Borrow<NonZeroU32>,
+ StartsMapValue: IntoIterator<IntoIter = StartsMapValueIter>,
+ StartsMapValueIter: Iterator<Item = (LocKind, Starts)>,
+ Starts: Borrow<BigUint>,
+{
+ starts_map_iter: StartsMapIter,
+ rest: Option<IterInternalsRest<StartsMapValueIter, Starts>>,
+}
+
+impl<StartsMapIter, RegLen, StartsMapValue, StartsMapValueIter, Starts>
+ IterInternals<StartsMapIter, RegLen, StartsMapValue, StartsMapValueIter, Starts>
+where
+ StartsMapIter: Iterator<Item = (RegLen, StartsMapValue)>,
+ RegLen: Borrow<NonZeroU32>,
+ StartsMapValue: IntoIterator<IntoIter = StartsMapValueIter>,
+ StartsMapValueIter: Iterator<Item = (LocKind, Starts)>,
+ Starts: Borrow<BigUint>,
+{
+ fn new(starts_map_iter: StartsMapIter) -> Self {
+ Self {
+ starts_map_iter,
+ rest: None,
+ }
+ }
+ fn next(&mut self) -> Option<Loc> {
+ loop {
+ while self.rest.is_none() {
+ let (reg_len, starts_map_value) = self.starts_map_iter.next()?;
+ self.rest = IterInternalsRest::new(*reg_len.borrow(), starts_map_value.into_iter());
+ }
+ if let Some(loc) = IterInternalsRest::next(&mut self.rest) {
+ return Some(loc);
+ }
+ }
+ }
}
#[derive(Clone, Debug)]
pub struct Iter<'a> {
- internals: Option<IterInternals<enum_map::Iter<'a, LocKind, BigUint>, &'a BigUint>>,
+ internals: IterInternals<
+ btree_map::Iter<'a, NonZeroU32, EnumMap<LocKind, BigUint>>,
+ &'a NonZeroU32,
+ &'a EnumMap<LocKind, BigUint>,
+ enum_map::Iter<'a, LocKind, BigUint>,
+ &'a BigUint,
+ >,
}
impl Iterator for Iter<'_> {
type Item = Loc;
fn next(&mut self) -> Option<Self::Item> {
- self.internals.as_mut()?.next()
+ self.internals.next()
}
}
impl FusedIterator for Iter<'_> {}
pub struct IntoIter {
- internals: Option<IterInternals<enum_map::IntoIter<LocKind, BigUint>, BigUint>>,
+ internals: IterInternals<
+ btree_map::IntoIter<NonZeroU32, EnumMap<LocKind, BigUint>>,
+ NonZeroU32,
+ EnumMap<LocKind, BigUint>,
+ enum_map::IntoIter<LocKind, BigUint>,
+ BigUint,
+ >,
}
impl Iterator for IntoIter {
type Item = Loc;
fn next(&mut self) -> Option<Self::Item> {
- self.internals.as_mut()?.next()
+ self.internals.next()
}
}
type IntoIter = IntoIter;
fn into_iter(self) -> Self::IntoIter {
- if let Some(ty) = self.ty {
- let mut starts = self.starts.into_iter().peekable();
- IntoIter {
- internals: Some(IterInternals {
- ty,
- start_range: get_start_range(starts.peek()),
- starts,
- }),
- }
- } else {
- IntoIter { internals: None }
+ IntoIter {
+ internals: IterInternals::new(self.starts_map.into_iter()),
}
}
}
}
}
-impl<E: From<Error>> Extend<Loc> for Result<LocSet, E> {
- fn extend<T: IntoIterator<Item = Loc>>(&mut self, iter: T) {
- iter.into_iter().try_for_each(|item| {
- let Ok(loc_set) = self else {
- return ControlFlow::Break(());
- };
- match loc_set.try_insert(item) {
- Ok(_) => ControlFlow::Continue(()),
- Err(e) => {
- *self = Err(e.into());
- ControlFlow::Break(())
- }
- }
- });
- }
-}
-
impl FromIterator<Loc> for LocSet {
fn from_iter<T: IntoIterator<Item = Loc>>(iter: T) -> Self {
let mut retval = LocSet::new();
}
}
-impl<E: From<Error>> FromIterator<Loc> for Result<LocSet, E> {
- fn from_iter<T: IntoIterator<Item = Loc>>(iter: T) -> Self {
- let mut retval = Ok(LocSet::new());
- retval.extend(iter);
- retval
- }
-}
-
struct HexBigUint<'a>(&'a BigUint);
impl fmt::Debug for HexBigUint<'_> {
}
}
+struct LocSetStartsMap<'a>(&'a BTreeMap<NonZeroU32, EnumMap<LocKind, BigUint>>);
+
+impl fmt::Debug for LocSetStartsMap<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_map()
+ .entries(self.0.iter().map(|(k, v)| (k, LocSetStarts(v))))
+ .finish()
+ }
+}
+
impl fmt::Debug for LocSet {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("LocSet")
- .field("starts", &self.starts)
+ .field("starts_map", &LocSetStartsMap(&self.starts_map))
.finish()
}
}
-macro_rules! impl_bin_op {
+macro_rules! forward_bin_op {
(
$bin_op:ident::$bin_op_fn:ident(),
$bin_assign_op:ident::$bin_assign_op_fn:ident(),
- $starts_op:expr,
- $handle_unequal_types:expr,
- $update_unequal_types:expr,
+ $bin_assign_rev_op_fn:ident(),
) => {
- impl $bin_op<&'_ LocSet> for &'_ LocSet {
- type Output = LocSet;
-
- fn $bin_op_fn(self, rhs: &'_ LocSet) -> Self::Output {
- if self.ty != rhs.ty {
- $handle_unequal_types(self, Cow::<LocSet>::Borrowed(rhs))
- } else {
- LocSet {
- starts: enum_map! {kind => $starts_op(&self.starts[kind], &rhs.starts[kind])},
- ty: self.ty,
- }
- }
- }
- }
-
- impl $bin_assign_op<&'_ LocSet> for LocSet {
- fn $bin_assign_op_fn(&mut self, rhs: &'_ LocSet) {
- if self.ty != rhs.ty {
- $update_unequal_types(self, rhs);
- } else {
- for (kind, starts) in &mut self.starts {
- let v: BigUint = std::mem::take(starts);
- *starts = $starts_op(v, &rhs.starts[kind]);
- }
- }
- }
- }
-
impl $bin_assign_op<LocSet> for LocSet {
fn $bin_assign_op_fn(&mut self, rhs: LocSet) {
self.$bin_assign_op_fn(&rhs);
type Output = LocSet;
fn $bin_op_fn(self, mut rhs: LocSet) -> Self::Output {
- if self.ty != rhs.ty {
- $handle_unequal_types(self, Cow::<LocSet>::Owned(rhs))
- } else {
- for (kind, starts) in &mut rhs.starts {
- *starts = $starts_op(&self.starts[kind], std::mem::take(starts));
+ rhs.$bin_assign_rev_op_fn(self);
+ rhs
+ }
+ }
+
+ const _: fn() = {
+ fn _check<T>()
+ where
+ for<'a> T: $bin_op<T> + $bin_op<&'a T> + $bin_assign_op<T> + $bin_assign_op<&'a T>,
+ for<'a, 'b> &'a T: $bin_op<T> + $bin_op<&'b T>,
+ {
+ }
+ _check::<LocSet>
+ };
+ };
+}
+
+impl BitAnd<&'_ LocSet> for &'_ LocSet {
+ type Output = LocSet;
+
+ fn bitand(self, rhs: &'_ LocSet) -> Self::Output {
+ LocSet::from_starts_map_iter_unchecked(self.starts_map.iter().map(|(®_len, starts)| {
+ (
+ reg_len,
+ enum_map! {kind => (&starts[kind]).bitand(rhs.starts(reg_len, kind))},
+ )
+ }))
+ }
+}
+
+impl BitAndAssign<&'_ LocSet> for LocSet {
+ fn bitand_assign(&mut self, rhs: &'_ LocSet) {
+ self.for_each_reg_len_filtering_out_empty_entries(|reg_len, starts| {
+ for (kind, starts) in starts {
+ starts.bitand_assign(rhs.starts(reg_len, kind));
+ }
+ });
+ }
+}
+
/// helper for binary operations that keeps Locs not present in rhs
/// (union-like ops `|` and `^`, where an absent rhs bitmask is a no-op).
macro_rules! impl_bin_op_keep {
    (
        $bin_op:ident::$bin_op_fn:ident(),
        $bin_assign_op:ident::$bin_assign_op_fn:ident(),
    ) => {
        impl $bin_op<&'_ LocSet> for &'_ LocSet {
            type Output = LocSet;

            fn $bin_op_fn(self, rhs: &'_ LocSet) -> Self::Output {
                // Clone-then-assign: the assign impl already handles entry
                // creation and empty-entry removal.
                let mut retval: LocSet = self.clone();
                retval.$bin_assign_op_fn(rhs);
                retval
            }
        }

        impl $bin_assign_op<&'_ LocSet> for LocSet {
            fn $bin_assign_op_fn(&mut self, rhs: &'_ LocSet) {
                self.bin_op_keep_helper(rhs, |_reg_len, lhs_starts, rhs_starts| {
                    for (kind, rhs_starts) in rhs_starts {
                        lhs_starts[kind].$bin_assign_op_fn(rhs_starts);
                    }
                });
            }
        }
    };
}
-impl_bin_op! {
+forward_bin_op! {
BitAnd::bitand(),
BitAndAssign::bitand_assign(),
- BitAnd::bitand,
- |_, _| LocSet::new(),
- |lhs, _| LocSet::clear(lhs),
+ bitand_assign(),
}
-impl_bin_op! {
+impl_bin_op_keep! {
BitOr::bitor(),
BitOrAssign::bitor_assign(),
- BitOr::bitor,
- |lhs: &LocSet, rhs: Cow<LocSet>| panic!("{}", Error::TyMismatch { ty: rhs.ty, expected_ty: lhs.ty }),
- |lhs: &mut LocSet, rhs: &LocSet| panic!("{}", Error::TyMismatch { ty: rhs.ty, expected_ty: lhs.ty }),
}
-impl_bin_op! {
+forward_bin_op! {
+ BitOr::bitor(),
+ BitOrAssign::bitor_assign(),
+ bitor_assign(),
+}
+
+impl_bin_op_keep! {
+ BitXor::bitxor(),
+ BitXorAssign::bitxor_assign(),
+}
+
+forward_bin_op! {
BitXor::bitxor(),
BitXorAssign::bitxor_assign(),
- BitXor::bitxor,
- |lhs: &LocSet, rhs: Cow<LocSet>| panic!("{}", Error::TyMismatch { ty: rhs.ty, expected_ty: lhs.ty }),
- |lhs: &mut LocSet, rhs: &LocSet| panic!("{}", Error::TyMismatch { ty: rhs.ty, expected_ty: lhs.ty }),
+ bitxor_assign(),
+}
+
+impl Sub<&'_ LocSet> for &'_ LocSet {
+ type Output = LocSet;
+
+ fn sub(self, rhs: &'_ LocSet) -> Self::Output {
+ LocSet::from_starts_map_iter_unchecked(self.starts_map.iter().map(|(®_len, starts)| {
+ (
+ reg_len,
+ enum_map! {kind => and_not(&starts[kind], rhs.starts(reg_len, kind))},
+ )
+ }))
+ }
+}
+
+impl SubAssign<&'_ LocSet> for LocSet {
+ fn sub_assign(&mut self, rhs: &'_ LocSet) {
+ self.bin_op_keep_helper(rhs, |_reg_len, lhs_starts, rhs_starts| {
+ for (kind, lhs_starts) in lhs_starts {
+ let rhs_starts = &rhs_starts[kind];
+ if rhs_starts.is_zero() {
+ continue;
+ }
+ *lhs_starts = and_not(mem::take(lhs_starts), rhs_starts);
+ }
+ });
+ }
}
-impl_bin_op! {
+forward_bin_op! {
Sub::sub(),
SubAssign::sub_assign(),
- and_not,
- |lhs: &LocSet, _| lhs.clone(),
- |_, _| {},
+ sub_reverse_assign(),
}
/// the largest number of Locs in `lhs` that a single Loc
global_state: &GlobalState,
) -> Interned<LocSetMaxConflictsWith<Self>>;
fn compute_result(lhs: &Interned<LocSet>, rhs: &Self, global_state: &GlobalState) -> u32;
+ #[cfg(feature = "fuzzing")]
+ fn reference_compute_result(
+ lhs: &Interned<LocSet>,
+ rhs: &Self,
+ global_state: &GlobalState,
+ ) -> u32;
}
impl LocSetMaxConflictsWithTrait for Loc {
fn compute_result(lhs: &Interned<LocSet>, rhs: &Self, _global_state: &GlobalState) -> u32 {
// now we do the equivalent of:
- // return lhs.iter().map(|loc| rhs.conflicts(loc) as u32).sum().unwrap_or(0)
- let Some(reg_len) = lhs.reg_len() else {
- return 0;
- };
- let starts = &lhs.starts[rhs.kind];
- if starts.is_zero() {
- return 0;
+ // return lhs.iter().map(|loc| rhs.conflicts(loc) as u32).sum()
+ let mut retval = 0;
+ for (&lhs_reg_len, lhs_starts) in lhs.starts_map() {
+ let lhs_starts = &lhs_starts[rhs.kind];
+ if lhs_starts.is_zero() {
+ continue;
+ }
+ // now we do the equivalent of:
+ // retval += sum(rhs.start < lhs_start + lhs_reg_len
+ // and lhs_start < rhs.start + rhs.reg_len
+ // for lhs_start in lhs_starts)
+ let lhs_stops = lhs_starts << lhs_reg_len.get();
+
+ // find all the bit indexes `i` where `i < rhs.start + 1`
+ let lt_rhs_start_plus_1 = (BigUint::from(1u32) << (rhs.start + 1)) - 1u32;
+
+ // find all the bit indexes `i` where
+ // `i < rhs.start + rhs.reg_len + lhs_reg_len`
+ let lt_rhs_start_plus_rhs_reg_len_plus_reg_len =
+ (BigUint::from(1u32) << (rhs.start + rhs.reg_len.get() + lhs_reg_len.get())) - 1u32;
+ let lhs_stops_and_lt_rhs_start_plus_1 = &lhs_stops & lt_rhs_start_plus_1;
+ let mut included = and_not(lhs_stops, lhs_stops_and_lt_rhs_start_plus_1);
+ included &= lt_rhs_start_plus_rhs_reg_len_plus_reg_len;
+ retval += included.count_ones() as u32;
}
- // now we do the equivalent of:
- // return sum(rhs.start < start + reg_len
- // and start < rhs.start + rhs.reg_len
- // for start in starts)
- let stops = starts << reg_len.get();
-
- // find all the bit indexes `i` where `i < rhs.start + 1`
- let lt_rhs_start_plus_1 = (BigUint::from(1u32) << (rhs.start + 1)) - 1u32;
+ retval
+ }
- // find all the bit indexes `i` where
- // `i < rhs.start + rhs.reg_len + reg_len`
- let lt_rhs_start_plus_rhs_reg_len_plus_reg_len =
- (BigUint::from(1u32) << (rhs.start + rhs.reg_len.get() + reg_len.get())) - 1u32;
- let mut included = and_not(&stops, &stops & lt_rhs_start_plus_1);
- included &= lt_rhs_start_plus_rhs_reg_len_plus_reg_len;
- included.count_ones() as u32
+ #[cfg(feature = "fuzzing")]
+ fn reference_compute_result(
+ lhs: &Interned<LocSet>,
+ rhs: &Self,
+ global_state: &GlobalState,
+ ) -> u32 {
+ lhs.iter().map(|loc| rhs.conflicts(loc) as u32).sum::<u32>()
}
fn intern(
.unwrap_or(0)
}
+ #[cfg(feature = "fuzzing")]
+ fn reference_compute_result(
+ lhs: &Interned<LocSet>,
+ rhs: &Self,
+ global_state: &GlobalState,
+ ) -> u32 {
+ rhs.iter()
+ .map(|loc| lhs.clone().reference_max_conflicts_with(loc, global_state))
+ .max()
+ .unwrap_or(0)
+ }
+
fn intern(
v: LocSetMaxConflictsWith<Self>,
global_state: &GlobalState,
}
}
}
+ #[cfg(feature = "fuzzing")]
+ pub fn reference_result(&self, global_state: &GlobalState) -> u32 {
+ match self.result.get() {
+ Some(v) => v,
+ None => {
+ let retval = Rhs::reference_compute_result(&self.lhs, &self.rhs, global_state);
+ self.result.set(Some(retval));
+ retval
+ }
+ }
+ }
}
impl Interned<LocSet> {
)
.result(global_state)
}
+ #[cfg(feature = "fuzzing")]
+ pub fn reference_max_conflicts_with<Rhs>(self, rhs: Rhs, global_state: &GlobalState) -> u32
+ where
+ Rhs: LocSetMaxConflictsWithTrait,
+ LocSetMaxConflictsWith<Rhs>: InternTarget,
+ {
+ LocSetMaxConflictsWithTrait::intern(
+ LocSetMaxConflictsWith {
+ lhs: self,
+ rhs,
+ result: Cell::default(),
+ },
+ global_state,
+ )
+ .reference_result(global_state)
+ }
pub fn conflicts_with<Rhs>(self, rhs: Rhs, global_state: &GlobalState) -> bool
where
Rhs: LocSetMaxConflictsWithTrait,