vigna / sux-rs

Rust implementations of succinct data structures
Apache License 2.0
44 stars 7 forks source link

CompactArray backed by Mmap #10

Closed progval closed 1 year ago

progval commented 1 year ago

Hi,

In swh-graph, we use a CompactArray<Mmap>, which isn't supported since e468cedf68a16ffe0ef73b9703342f665dc6b9dc: https://gitlab.softwareheritage.org/swh/devel/swh-graph/-/blob/ec014b29c2361a13bbfa55f3f509be74572d7699/rust/src/map/node2type.rs#L10

I tried to generalize VSlice and CompactArray to non-usize types (I need u8 here) but I'm getting lost in satisfying the type-checker for these implementations (especially the interactions between VSliceCore<Type>, VSliceCore<AtomicType>, and VSliceAtomic<AtomicType>):

diff --git a/src/bits/bit_vec.rs b/src/bits/bit_vec.rs
index ba06658..38a2e45 100644
--- a/src/bits/bit_vec.rs
+++ b/src/bits/bit_vec.rs
@@ -321,14 +321,14 @@ impl CountBitVec<Vec<usize>> {
     }
 }

-impl<B: VSlice> Select for CountBitVec<B> {
+impl<B: VSlice<usize>> Select for CountBitVec<B> {
     #[inline(always)]
     unsafe fn select_unchecked(&self, rank: usize) -> usize {
         self.select_unchecked_hinted(rank, 0, 0)
     }
 }

-impl<B: VSlice> SelectHinted for CountBitVec<B> {
+impl<B: VSlice<usize>> SelectHinted for CountBitVec<B> {
     unsafe fn select_unchecked_hinted(&self, rank: usize, pos: usize, rank_at_pos: usize) -> usize {
         let mut word_index = pos / BITS;
         let bit_index = pos % BITS;
@@ -348,14 +348,14 @@ impl<B: VSlice> SelectHinted for CountBitVec<B> {
     }
 }

-impl<B: VSlice> SelectZero for CountBitVec<B> {
+impl<B: VSlice<usize>> SelectZero for CountBitVec<B> {
     #[inline(always)]
     unsafe fn select_zero_unchecked(&self, rank: usize) -> usize {
         self.select_zero_unchecked_hinted(rank, 0, 0)
     }
 }

-impl<B: VSlice> SelectZeroHinted for CountBitVec<B> {
+impl<B: VSlice<usize>> SelectZeroHinted for CountBitVec<B> {
     unsafe fn select_zero_unchecked_hinted(
         &self,
         rank: usize,
diff --git a/src/bits/compact_array.rs b/src/bits/compact_array.rs
index 6a52015..22655ba 100644
--- a/src/bits/compact_array.rs
+++ b/src/bits/compact_array.rs
@@ -97,7 +97,7 @@ impl<B> CompactArray<B> {
     }
 }

-impl<T> VSliceCore for CompactArray<T> {
+impl<T> VSliceCore<usize> for CompactArray<T> {
     #[inline(always)]
     fn bit_width(&self) -> usize {
         debug_assert!(self.bit_width <= BITS);
@@ -110,7 +110,7 @@ impl<T> VSliceCore for CompactArray<T> {
     }
 }

-impl<T: AsRef<[usize]>> VSlice for CompactArray<T> {
+impl<T: AsRef<[usize]>> VSlice<usize> for CompactArray<T> {
     #[inline]
     unsafe fn get_unchecked(&self, index: usize) -> usize {
         let pos = index * self.bit_width;
@@ -127,7 +127,7 @@ impl<T: AsRef<[usize]>> VSlice for CompactArray<T> {
     }
 }

-impl VSliceMut for CompactArray<Vec<usize>> {
+impl VSliceMut<usize> for CompactArray<Vec<usize>> {
     // We reimplement set as we have the mask in the structure.

     /// Set the element of the slice at the specified index.
@@ -169,7 +169,7 @@ impl VSliceMut for CompactArray<Vec<usize>> {
     }
 }

-impl<T: AsRef<[AtomicUsize]>> VSliceAtomic for CompactArray<T> {
+impl<T: AsRef<[AtomicUsize]>> VSliceAtomic<AtomicUsize> for CompactArray<T> {
     #[inline]
     unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> usize {
         let pos = index * self.bit_width;
diff --git a/src/dict/elias_fano.rs b/src/dict/elias_fano.rs
index b7531c5..a63c98c 100644
--- a/src/dict/elias_fano.rs
+++ b/src/dict/elias_fano.rs
@@ -259,7 +259,7 @@ impl<H, L> EliasFano<H, L> {
     }
 }

-impl<H: Select + AsRef<[usize]>, L: VSlice> IndexedDict for EliasFano<H, L> {
+impl<H: Select + AsRef<[usize]>, L: VSlice<usize>> IndexedDict for EliasFano<H, L> {
     type OutputValue = usize;
     type InputValue = usize;

@@ -308,7 +308,7 @@ where
 }

 /// An iterator streaming over the Elias--Fano representation.
-pub struct EliasFanoIterator<'a, H: Select + AsRef<[usize]>, L: VSlice> {
+pub struct EliasFanoIterator<'a, H: Select + AsRef<[usize]>, L: VSlice<usize>> {
     ef: &'a EliasFano<H, L>,
     /// The index of the next value it will be returned when `next` is called.
     index: usize,
@@ -319,7 +319,7 @@ pub struct EliasFanoIterator<'a, H: Select + AsRef<[usize]>, L: VSlice> {
     window: usize,
 }

-impl<'a, H: Select + AsRef<[usize]>, L: VSlice> EliasFanoIterator<'a, H, L> {
+impl<'a, H: Select + AsRef<[usize]>, L: VSlice<usize>> EliasFanoIterator<'a, H, L> {
     pub fn new(ef: &'a EliasFano<H, L>) -> Self {
         let word = if ef.high_bits.as_ref().is_empty() {
             0
@@ -360,7 +360,7 @@ impl<'a, H: Select + AsRef<[usize]>, L: VSlice> EliasFanoIterator<'a, H, L> {
     }
 }

-impl<'a, H: Select + AsRef<[usize]>, L: VSlice> Iterator for EliasFanoIterator<'a, H, L> {
+impl<'a, H: Select + AsRef<[usize]>, L: VSlice<usize>> Iterator for EliasFanoIterator<'a, H, L> {
     type Item = usize;

     #[inline(always)]
@@ -387,7 +387,9 @@ impl<'a, H: Select + AsRef<[usize]>, L: VSlice> Iterator for EliasFanoIterator<'
     }
 }

-impl<'a, H: Select + AsRef<[usize]>, L: VSlice> ExactSizeIterator for EliasFanoIterator<'a, H, L> {
+impl<'a, H: Select + AsRef<[usize]>, L: VSlice<usize>> ExactSizeIterator
+    for EliasFanoIterator<'a, H, L>
+{
     #[inline(always)]
     fn len(&self) -> usize {
         self.ef.len() - self.index
diff --git a/src/rank_sel/quantum_index.rs b/src/rank_sel/quantum_index.rs
index 7c700fa..fa560fc 100644
--- a/src/rank_sel/quantum_index.rs
+++ b/src/rank_sel/quantum_index.rs
@@ -28,13 +28,17 @@ use epserde::*;
 ///
 /// See [`QuantumZeroIndex`](crate::rank_sel::QuantumZeroIndex) for the same index for zeros.
 #[derive(Epserde, Debug, Clone, PartialEq, Eq, Hash)]
-pub struct QuantumIndex<B: SelectHinted, O: VSlice = Vec<usize>, const QUANTUM_LOG2: usize = 8> {
+pub struct QuantumIndex<
+    B: SelectHinted,
+    O: VSlice<usize> = Vec<usize>,
+    const QUANTUM_LOG2: usize = 8,
+> {
     bits: B,
     ones: O,
     _marker: core::marker::PhantomData<[(); QUANTUM_LOG2]>,
 }

-impl<B: SelectHinted + AsRef<[usize]>, O: VSliceMut, const QUANTUM_LOG2: usize>
+impl<B: SelectHinted + AsRef<[usize]>, O: VSliceMut<usize>, const QUANTUM_LOG2: usize>
     QuantumIndex<B, O, QUANTUM_LOG2>
 {
     fn build_ones(&mut self) -> Result<()> {
@@ -60,7 +64,7 @@ impl<B: SelectHinted + AsRef<[usize]>, O: VSliceMut, const QUANTUM_LOG2: usize>
 }

 /// Provide the hint to the underlying structure
-impl<B: SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> Select
+impl<B: SelectHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> Select
     for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -74,7 +78,7 @@ impl<B: SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> Select
 }

 /// If the underlying implementation has select zero, forward the methods.
-impl<B: SelectHinted + SelectZero, O: VSlice, const QUANTUM_LOG2: usize> SelectZero
+impl<B: SelectHinted + SelectZero, O: VSlice<usize>, const QUANTUM_LOG2: usize> SelectZero
     for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -88,8 +92,8 @@ impl<B: SelectHinted + SelectZero, O: VSlice, const QUANTUM_LOG2: usize> SelectZ
 }

 /// If the underlying implementation has hint for select zero, forward the methods.
-impl<B: SelectHinted + SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> SelectZeroHinted
-    for QuantumIndex<B, O, QUANTUM_LOG2>
+impl<B: SelectHinted + SelectZeroHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize>
+    SelectZeroHinted for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
     unsafe fn select_zero_unchecked_hinted(
@@ -103,7 +107,7 @@ impl<B: SelectHinted + SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> S
     }
 }

-impl<B: SelectHinted + BitLength, O: VSlice, const QUANTUM_LOG2: usize> BitLength
+impl<B: SelectHinted + BitLength, O: VSlice<usize>, const QUANTUM_LOG2: usize> BitLength
     for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -112,7 +116,7 @@ impl<B: SelectHinted + BitLength, O: VSlice, const QUANTUM_LOG2: usize> BitLengt
     }
 }

-impl<B: SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> BitCount
+impl<B: SelectHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> BitCount
     for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -125,7 +129,7 @@ impl<B: SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> BitCount
 impl<B: SelectHinted, T, const QUANTUM_LOG2: usize> ConvertTo<B>
     for QuantumIndex<B, Vec<T>, QUANTUM_LOG2>
 where
-    Vec<T>: VSlice,
+    Vec<T>: VSlice<usize>,
 {
     #[inline(always)]
     fn convert_to(self) -> Result<B> {
@@ -152,7 +156,7 @@ impl<B: SelectHinted + AsRef<[usize]>, const QUANTUM_LOG2: usize>
 impl<B, O, const QUANTUM_LOG2: usize> AsRef<[usize]> for QuantumIndex<B, O, QUANTUM_LOG2>
 where
     B: AsRef<[usize]> + SelectHinted,
-    O: VSlice,
+    O: VSlice<usize>,
 {
     fn as_ref(&self) -> &[usize] {
         self.bits.as_ref()
diff --git a/src/rank_sel/quantum_zero_index.rs b/src/rank_sel/quantum_zero_index.rs
index 59cb13d..658f27d 100644
--- a/src/rank_sel/quantum_zero_index.rs
+++ b/src/rank_sel/quantum_zero_index.rs
@@ -30,7 +30,7 @@ use epserde::*;
 #[derive(Epserde, Debug, Clone, PartialEq, Eq, Hash)]
 pub struct QuantumZeroIndex<
     B: SelectZeroHinted,
-    O: VSlice = Vec<usize>,
+    O: VSlice<usize> = Vec<usize>,
     const QUANTUM_LOG2: usize = 8,
 > {
     bits: B,
@@ -38,7 +38,7 @@ pub struct QuantumZeroIndex<
     _marker: core::marker::PhantomData<[(); QUANTUM_LOG2]>,
 }

-impl<B: SelectZeroHinted + AsRef<[usize]>, O: VSliceMut, const QUANTUM_LOG2: usize>
+impl<B: SelectZeroHinted + AsRef<[usize]>, O: VSliceMut<usize>, const QUANTUM_LOG2: usize>
     QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     fn build_zeros(&mut self) -> Result<()> {
@@ -67,7 +67,7 @@ impl<B: SelectZeroHinted + AsRef<[usize]>, O: VSliceMut, const QUANTUM_LOG2: usi
 }

 /// Provide the hint to the underlying structure
-impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> SelectZero
+impl<B: SelectZeroHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> SelectZero
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -82,7 +82,7 @@ impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> SelectZero
 }

 /// If the underlying implementation has select, forward the methods
-impl<B: SelectZeroHinted + Select, O: VSlice, const QUANTUM_LOG2: usize> Select
+impl<B: SelectZeroHinted + Select, O: VSlice<usize>, const QUANTUM_LOG2: usize> Select
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -96,7 +96,7 @@ impl<B: SelectZeroHinted + Select, O: VSlice, const QUANTUM_LOG2: usize> Select
 }

 /// If the underlying implementation has a hint for select, forward the methods
-impl<B: SelectZeroHinted + SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> SelectHinted
+impl<B: SelectZeroHinted + SelectHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> SelectHinted
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -105,7 +105,7 @@ impl<B: SelectZeroHinted + SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> S
     }
 }

-impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> BitLength
+impl<B: SelectZeroHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> BitLength
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -114,7 +114,7 @@ impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> BitLength
     }
 }

-impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> BitCount
+impl<B: SelectZeroHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> BitCount
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -152,7 +152,7 @@ impl<B: SelectZeroHinted + AsRef<[usize]>, const QUANTUM_LOG2: usize>
 impl<B, O, const QUANTUM_LOG2: usize> AsRef<[usize]> for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 where
     B: AsRef<[usize]> + SelectZeroHinted,
-    O: VSlice,
+    O: VSlice<usize>,
 {
     fn as_ref(&self) -> &[usize] {
         self.bits.as_ref()
diff --git a/src/traits/vslice.rs b/src/traits/vslice.rs
index 9cd785d..737ef8e 100644
--- a/src/traits/vslice.rs
+++ b/src/traits/vslice.rs
@@ -34,12 +34,13 @@ and `&[AtomicUsize]` that view their elements as values with a bit width
 equal to that of `usize`. The implementations based on atomic types implements
 [`VSliceAtomic`].
 */
+use common_traits::{Atomic, AtomicNumber, Number};
 use core::sync::atomic::{AtomicUsize, Ordering};

 const BITS: usize = core::mem::size_of::<usize>() * 8;

 /// Common methods for [`VSlice`], [`VSliceMut`], and [`VSliceAtomic`]
-pub trait VSliceCore {
+pub trait VSliceCore<Item> {
     /// Return the width of the slice. All elements stored in the slice must
     /// fit within this bit width.
     fn bit_width(&self) -> usize;
@@ -81,12 +82,12 @@ macro_rules! debug_assert_bounds {
 }

 /// A value slice.
-pub trait VSlice: VSliceCore {
+pub trait VSlice<Item>: VSliceCore<Item> {
     /// Return the value at the specified index.
     ///
     /// # Safety
     /// `index` must be in [0..[len](`VSliceCore::len`)). No bounds checking is performed.
-    unsafe fn get_unchecked(&self, index: usize) -> usize;
+    unsafe fn get_unchecked(&self, index: usize) -> Item;

     /// Return the value at the specified index.
     ///
@@ -99,7 +100,7 @@ pub trait VSlice: VSliceCore {
 }

 /// A mutable value slice.
-pub trait VSliceMut: VSlice {
+pub trait VSliceMut<Item>: VSlice<Item> {
     /// Set the element of the slice at the specified index.
     /// No bounds checking is performed.
     ///
@@ -107,18 +108,18 @@ pub trait VSliceMut: VSlice {
     /// - `index` must be in [0..[len](`VSliceCore::len`));
     /// - `value` must fit withing [`VSliceCore::bit_width`] bits.
     /// No bound or bit-width check is performed.
-    unsafe fn set_unchecked(&mut self, index: usize, value: usize);
+    unsafe fn set_unchecked(&mut self, index: usize, value: Item);

     /// Set the element of the slice at the specified index.
     ///
     ///
     /// May panic if the index is not in in [0..[len](`VSliceCore::len`))
     /// or the value does not fit in [`VSliceCore::bit_width`] bits.
-    fn set(&mut self, index: usize, value: usize) {
+    fn set(&mut self, index: usize, value: Item) {
         panic_if_out_of_bounds!(index, self.len());
         let bw = self.bit_width();
-        let mask = usize::MAX.wrapping_shr(BITS as u32 - bw as u32)
-            & !((bw as isize - 1) >> (BITS - 1)) as usize;
+        let mask = usize::MAX.wrapping_shr(Self::Item::BITS as u32 - bw as u32)
+            & !((bw as isize - 1) >> (Self::Item::BITS - 1)) as usize;
         panic_if_value!(value, mask, bw);
         unsafe {
             self.set_unchecked(index, value);
@@ -127,13 +128,13 @@ pub trait VSliceMut: VSlice {
 }

 /// A thread-safe value slice supporting atomic operations.
-pub trait VSliceAtomic: VSliceCore {
+pub trait VSliceAtomic<Item: Atomic>: VSliceCore<Item> {
     /// Return the value at the specified index.
     ///
     /// # Safety
     /// `index` must be in [0..[len](`VSliceCore::len`)).
     /// No bound or bit-width check is performed.
-    unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> usize;
+    unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> Item::NonAtomic;

     /// Return the value at the specified index.
     ///
@@ -150,20 +151,20 @@ pub trait VSliceAtomic: VSliceCore {
     /// - `index` must be in [0..[len](`VSliceCore::len`));
     /// - `value` must fit withing [`VSliceCore::bit_width`] bits.
     /// No bound or bit-width check is performed.
-    unsafe fn set_unchecked(&self, index: usize, value: usize, order: Ordering);
+    unsafe fn set_unchecked(&self, index: usize, value: Item::NonAtomic, order: Ordering);

     /// Set the element of the slice at the specified index.
     ///
     ///
     /// May panic if the index is not in in [0..[len](`VSliceCore::len`))
     /// or the value does not fit in [`VSliceCore::bit_width`] bits.
-    fn set(&self, index: usize, value: usize, order: Ordering) {
+    fn set(&self, index: usize, value: Item::NonAtomic, order: Ordering) {
         if index >= self.len() {
             panic_if_out_of_bounds!(index, self.len());
         }
         let bw = self.bit_width();
-        let mask = usize::MAX.wrapping_shr(BITS as u32 - bw as u32)
-            & !((bw as isize - 1) >> (BITS - 1)) as usize;
+        let mask = usize::MAX.wrapping_shr(Self::Item::BITS as u32 - bw as u32)
+            & !((bw as isize - 1) >> (Self::Item::BITS - 1)) as usize;
         panic_if_value!(value, mask, bw);
         unsafe {
             self.set_unchecked(index, value, order);
@@ -171,10 +172,10 @@ pub trait VSliceAtomic: VSliceCore {
     }
 }

-impl<T: AsRef<[usize]>> VSliceCore for T {
+impl<N: Number, T: AsRef<[N]>> VSliceCore<N> for T {
     #[inline(always)]
     fn bit_width(&self) -> usize {
-        BITS
+        N::BITS
     }
     #[inline(always)]
     fn len(&self) -> usize {
@@ -182,30 +183,35 @@ impl<T: AsRef<[usize]>> VSliceCore for T {
     }
 }

-impl<T: AsRef<[usize]>> VSlice for T {
+impl<N: Number, T: AsRef<[N]>> VSlice<N> for T {
     #[inline(always)]
-    unsafe fn get_unchecked(&self, index: usize) -> usize {
+    unsafe fn get_unchecked(&self, index: usize) -> N {
         debug_assert_bounds!(index, self.len());
         *self.as_ref().get_unchecked(index)
     }
 }

-impl<T: AsRef<[AtomicUsize]> + AsRef<[usize]>> VSliceAtomic for T {
+impl<N: AtomicNumber + Number, T: AsRef<[N]> + AsRef<[N::NonAtomic]>> VSliceAtomic<N> for T where N::NonAtomic: Number
+{
     #[inline(always)]
-    unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> usize {
+    unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> N::NonAtomic {
         debug_assert_bounds!(index, self.len());
-        <T as AsRef<[AtomicUsize]>>::as_ref(self).get_unchecked(index).load(order)
+        <T as AsRef<[N]>>::as_ref(self)
+            .get_unchecked(index)
+            .load(order)
     }
     #[inline(always)]
-    unsafe fn set_unchecked(&self, index: usize, value: usize, order: Ordering) {
+    unsafe fn set_unchecked(&self, index: usize, value: N::NonAtomic, order: Ordering) {
         debug_assert_bounds!(index, self.len());
-        <T as AsRef<[AtomicUsize]>>::as_ref(self).get_unchecked(index).store(value, order);
+        <T as AsRef<[N]>>::as_ref(self)
+            .get_unchecked(index)
+            .store(value, order);
     }
 }

-impl<T: AsMut<[usize]> + AsRef<[usize]>> VSliceMut for T {
+impl<N: Number, T: AsMut<[N]> + AsRef<[N]>> VSliceMut<N> for T {
     #[inline(always)]
-    unsafe fn set_unchecked(&mut self, index: usize, value: usize) {
+    unsafe fn set_unchecked(&mut self, index: usize, value: N) {
         debug_assert_bounds!(index, self.len());
         *self.as_mut().get_unchecked_mut(index) = value;
     }

but I can't also provide a blanket impl for N: AtomicNumber, because Rust rejects overlapping blanket impls keyed on two different trait bounds ("some third-party struct might implement both traits").

So I also tried with macros:

diff --git a/src/bits/bit_vec.rs b/src/bits/bit_vec.rs
index ba06658..38a2e45 100644
--- a/src/bits/bit_vec.rs
+++ b/src/bits/bit_vec.rs
@@ -321,14 +321,14 @@ impl CountBitVec<Vec<usize>> {
     }
 }

-impl<B: VSlice> Select for CountBitVec<B> {
+impl<B: VSlice<usize>> Select for CountBitVec<B> {
     #[inline(always)]
     unsafe fn select_unchecked(&self, rank: usize) -> usize {
         self.select_unchecked_hinted(rank, 0, 0)
     }
 }

-impl<B: VSlice> SelectHinted for CountBitVec<B> {
+impl<B: VSlice<usize>> SelectHinted for CountBitVec<B> {
     unsafe fn select_unchecked_hinted(&self, rank: usize, pos: usize, rank_at_pos: usize) -> usize {
         let mut word_index = pos / BITS;
         let bit_index = pos % BITS;
@@ -348,14 +348,14 @@ impl<B: VSlice> SelectHinted for CountBitVec<B> {
     }
 }

-impl<B: VSlice> SelectZero for CountBitVec<B> {
+impl<B: VSlice<usize>> SelectZero for CountBitVec<B> {
     #[inline(always)]
     unsafe fn select_zero_unchecked(&self, rank: usize) -> usize {
         self.select_zero_unchecked_hinted(rank, 0, 0)
     }
 }

-impl<B: VSlice> SelectZeroHinted for CountBitVec<B> {
+impl<B: VSlice<usize>> SelectZeroHinted for CountBitVec<B> {
     unsafe fn select_zero_unchecked_hinted(
         &self,
         rank: usize,
diff --git a/src/bits/compact_array.rs b/src/bits/compact_array.rs
index 6a52015..92f43fa 100644
--- a/src/bits/compact_array.rs
+++ b/src/bits/compact_array.rs
@@ -97,7 +97,9 @@ impl<B> CompactArray<B> {
     }
 }

-impl<T> VSliceCore for CompactArray<T> {
+macro_rules! impl_vslice {
+    ($ty:ty) => {
+impl<T> VSliceCore<$ty> for CompactArray<T> {
     #[inline(always)]
     fn bit_width(&self) -> usize {
         debug_assert!(self.bit_width <= BITS);
@@ -110,9 +112,9 @@ impl<T> VSliceCore for CompactArray<T> {
     }
 }

-impl<T: AsRef<[usize]>> VSlice for CompactArray<T> {
+impl<T: AsRef<[$ty]>> VSlice<$ty> for CompactArray<T> {
     #[inline]
-    unsafe fn get_unchecked(&self, index: usize) -> usize {
+    unsafe fn get_unchecked(&self, index: usize) -> $ty {
         let pos = index * self.bit_width;
         let word_index = pos / BITS;
         let bit_index = pos % BITS;
@@ -127,7 +129,7 @@ impl<T: AsRef<[usize]>> VSlice for CompactArray<T> {
     }
 }

-impl VSliceMut for CompactArray<Vec<usize>> {
+impl VSliceMut<$ty> for CompactArray<Vec<$ty>> {
     // We reimplement set as we have the mask in the structure.

     /// Set the element of the slice at the specified index.
@@ -136,7 +138,7 @@ impl VSliceMut for CompactArray<Vec<usize>> {
     /// May panic if the index is not in in [0..[len](`VSliceCore::len`))
     /// or the value does not fit in [`VSliceCore::bit_width`] bits.
     #[inline(always)]
-    fn set(&mut self, index: usize, value: usize) {
+    fn set(&mut self, index: usize, value: $ty) {
         panic_if_out_of_bounds!(index, self.len);
         panic_if_value!(value, self.mask, self.bit_width);
         unsafe {
@@ -145,7 +147,7 @@ impl VSliceMut for CompactArray<Vec<usize>> {
     }

     #[inline]
-    unsafe fn set_unchecked(&mut self, index: usize, value: usize) {
+    unsafe fn set_unchecked(&mut self, index: usize, value: $ty) {
         let pos = index * self.bit_width;
         let word_index = pos / BITS;
         let bit_index = pos % BITS;
@@ -168,8 +170,15 @@ impl VSliceMut for CompactArray<Vec<usize>> {
         }
     }
 }
+    }
+}
+
+
+macro_rules! impl_vslice_atomic {
+    ($ty:ty) => {
+        impl_vslice!($ty);

-impl<T: AsRef<[AtomicUsize]>> VSliceAtomic for CompactArray<T> {
+impl<T: AsRef<[<$ty as Atomic>::NonAtomic]>> VSliceAtomic<$ty> for CompactArray<T> {
     #[inline]
     unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> usize {
         let pos = index * self.bit_width;
@@ -276,6 +285,11 @@ impl<T: AsRef<[AtomicUsize]>> VSliceAtomic for CompactArray<T> {
         }
     }
 }
+    }
+}
+
+impl_vslice!(usize);
+impl_vslice_atomic!(AtomicUsize);

 /// Provide conversion betweeen compact arrays whose backends
 /// are [convertible](ConvertTo) into one another.
diff --git a/src/dict/elias_fano.rs b/src/dict/elias_fano.rs
index b7531c5..a63c98c 100644
--- a/src/dict/elias_fano.rs
+++ b/src/dict/elias_fano.rs
@@ -259,7 +259,7 @@ impl<H, L> EliasFano<H, L> {
     }
 }

-impl<H: Select + AsRef<[usize]>, L: VSlice> IndexedDict for EliasFano<H, L> {
+impl<H: Select + AsRef<[usize]>, L: VSlice<usize>> IndexedDict for EliasFano<H, L> {
     type OutputValue = usize;
     type InputValue = usize;

@@ -308,7 +308,7 @@ where
 }

 /// An iterator streaming over the Elias--Fano representation.
-pub struct EliasFanoIterator<'a, H: Select + AsRef<[usize]>, L: VSlice> {
+pub struct EliasFanoIterator<'a, H: Select + AsRef<[usize]>, L: VSlice<usize>> {
     ef: &'a EliasFano<H, L>,
     /// The index of the next value it will be returned when `next` is called.
     index: usize,
@@ -319,7 +319,7 @@ pub struct EliasFanoIterator<'a, H: Select + AsRef<[usize]>, L: VSlice> {
     window: usize,
 }

-impl<'a, H: Select + AsRef<[usize]>, L: VSlice> EliasFanoIterator<'a, H, L> {
+impl<'a, H: Select + AsRef<[usize]>, L: VSlice<usize>> EliasFanoIterator<'a, H, L> {
     pub fn new(ef: &'a EliasFano<H, L>) -> Self {
         let word = if ef.high_bits.as_ref().is_empty() {
             0
@@ -360,7 +360,7 @@ impl<'a, H: Select + AsRef<[usize]>, L: VSlice> EliasFanoIterator<'a, H, L> {
     }
 }

-impl<'a, H: Select + AsRef<[usize]>, L: VSlice> Iterator for EliasFanoIterator<'a, H, L> {
+impl<'a, H: Select + AsRef<[usize]>, L: VSlice<usize>> Iterator for EliasFanoIterator<'a, H, L> {
     type Item = usize;

     #[inline(always)]
@@ -387,7 +387,9 @@ impl<'a, H: Select + AsRef<[usize]>, L: VSlice> Iterator for EliasFanoIterator<'
     }
 }

-impl<'a, H: Select + AsRef<[usize]>, L: VSlice> ExactSizeIterator for EliasFanoIterator<'a, H, L> {
+impl<'a, H: Select + AsRef<[usize]>, L: VSlice<usize>> ExactSizeIterator
+    for EliasFanoIterator<'a, H, L>
+{
     #[inline(always)]
     fn len(&self) -> usize {
         self.ef.len() - self.index
diff --git a/src/rank_sel/quantum_index.rs b/src/rank_sel/quantum_index.rs
index 7c700fa..fa560fc 100644
--- a/src/rank_sel/quantum_index.rs
+++ b/src/rank_sel/quantum_index.rs
@@ -28,13 +28,17 @@ use epserde::*;
 ///
 /// See [`QuantumZeroIndex`](crate::rank_sel::QuantumZeroIndex) for the same index for zeros.
 #[derive(Epserde, Debug, Clone, PartialEq, Eq, Hash)]
-pub struct QuantumIndex<B: SelectHinted, O: VSlice = Vec<usize>, const QUANTUM_LOG2: usize = 8> {
+pub struct QuantumIndex<
+    B: SelectHinted,
+    O: VSlice<usize> = Vec<usize>,
+    const QUANTUM_LOG2: usize = 8,
+> {
     bits: B,
     ones: O,
     _marker: core::marker::PhantomData<[(); QUANTUM_LOG2]>,
 }

-impl<B: SelectHinted + AsRef<[usize]>, O: VSliceMut, const QUANTUM_LOG2: usize>
+impl<B: SelectHinted + AsRef<[usize]>, O: VSliceMut<usize>, const QUANTUM_LOG2: usize>
     QuantumIndex<B, O, QUANTUM_LOG2>
 {
     fn build_ones(&mut self) -> Result<()> {
@@ -60,7 +64,7 @@ impl<B: SelectHinted + AsRef<[usize]>, O: VSliceMut, const QUANTUM_LOG2: usize>
 }

 /// Provide the hint to the underlying structure
-impl<B: SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> Select
+impl<B: SelectHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> Select
     for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -74,7 +78,7 @@ impl<B: SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> Select
 }

 /// If the underlying implementation has select zero, forward the methods.
-impl<B: SelectHinted + SelectZero, O: VSlice, const QUANTUM_LOG2: usize> SelectZero
+impl<B: SelectHinted + SelectZero, O: VSlice<usize>, const QUANTUM_LOG2: usize> SelectZero
     for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -88,8 +92,8 @@ impl<B: SelectHinted + SelectZero, O: VSlice, const QUANTUM_LOG2: usize> SelectZ
 }

 /// If the underlying implementation has hint for select zero, forward the methods.
-impl<B: SelectHinted + SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> SelectZeroHinted
-    for QuantumIndex<B, O, QUANTUM_LOG2>
+impl<B: SelectHinted + SelectZeroHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize>
+    SelectZeroHinted for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
     unsafe fn select_zero_unchecked_hinted(
@@ -103,7 +107,7 @@ impl<B: SelectHinted + SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> S
     }
 }

-impl<B: SelectHinted + BitLength, O: VSlice, const QUANTUM_LOG2: usize> BitLength
+impl<B: SelectHinted + BitLength, O: VSlice<usize>, const QUANTUM_LOG2: usize> BitLength
     for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -112,7 +116,7 @@ impl<B: SelectHinted + BitLength, O: VSlice, const QUANTUM_LOG2: usize> BitLengt
     }
 }

-impl<B: SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> BitCount
+impl<B: SelectHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> BitCount
     for QuantumIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -125,7 +129,7 @@ impl<B: SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> BitCount
 impl<B: SelectHinted, T, const QUANTUM_LOG2: usize> ConvertTo<B>
     for QuantumIndex<B, Vec<T>, QUANTUM_LOG2>
 where
-    Vec<T>: VSlice,
+    Vec<T>: VSlice<usize>,
 {
     #[inline(always)]
     fn convert_to(self) -> Result<B> {
@@ -152,7 +156,7 @@ impl<B: SelectHinted + AsRef<[usize]>, const QUANTUM_LOG2: usize>
 impl<B, O, const QUANTUM_LOG2: usize> AsRef<[usize]> for QuantumIndex<B, O, QUANTUM_LOG2>
 where
     B: AsRef<[usize]> + SelectHinted,
-    O: VSlice,
+    O: VSlice<usize>,
 {
     fn as_ref(&self) -> &[usize] {
         self.bits.as_ref()
diff --git a/src/rank_sel/quantum_zero_index.rs b/src/rank_sel/quantum_zero_index.rs
index 59cb13d..658f27d 100644
--- a/src/rank_sel/quantum_zero_index.rs
+++ b/src/rank_sel/quantum_zero_index.rs
@@ -30,7 +30,7 @@ use epserde::*;
 #[derive(Epserde, Debug, Clone, PartialEq, Eq, Hash)]
 pub struct QuantumZeroIndex<
     B: SelectZeroHinted,
-    O: VSlice = Vec<usize>,
+    O: VSlice<usize> = Vec<usize>,
     const QUANTUM_LOG2: usize = 8,
 > {
     bits: B,
@@ -38,7 +38,7 @@ pub struct QuantumZeroIndex<
     _marker: core::marker::PhantomData<[(); QUANTUM_LOG2]>,
 }

-impl<B: SelectZeroHinted + AsRef<[usize]>, O: VSliceMut, const QUANTUM_LOG2: usize>
+impl<B: SelectZeroHinted + AsRef<[usize]>, O: VSliceMut<usize>, const QUANTUM_LOG2: usize>
     QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     fn build_zeros(&mut self) -> Result<()> {
@@ -67,7 +67,7 @@ impl<B: SelectZeroHinted + AsRef<[usize]>, O: VSliceMut, const QUANTUM_LOG2: usi
 }

 /// Provide the hint to the underlying structure
-impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> SelectZero
+impl<B: SelectZeroHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> SelectZero
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -82,7 +82,7 @@ impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> SelectZero
 }

 /// If the underlying implementation has select, forward the methods
-impl<B: SelectZeroHinted + Select, O: VSlice, const QUANTUM_LOG2: usize> Select
+impl<B: SelectZeroHinted + Select, O: VSlice<usize>, const QUANTUM_LOG2: usize> Select
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -96,7 +96,7 @@ impl<B: SelectZeroHinted + Select, O: VSlice, const QUANTUM_LOG2: usize> Select
 }

 /// If the underlying implementation has a hint for select, forward the methods
-impl<B: SelectZeroHinted + SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> SelectHinted
+impl<B: SelectZeroHinted + SelectHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> SelectHinted
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -105,7 +105,7 @@ impl<B: SelectZeroHinted + SelectHinted, O: VSlice, const QUANTUM_LOG2: usize> S
     }
 }

-impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> BitLength
+impl<B: SelectZeroHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> BitLength
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -114,7 +114,7 @@ impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> BitLength
     }
 }

-impl<B: SelectZeroHinted, O: VSlice, const QUANTUM_LOG2: usize> BitCount
+impl<B: SelectZeroHinted, O: VSlice<usize>, const QUANTUM_LOG2: usize> BitCount
     for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 {
     #[inline(always)]
@@ -152,7 +152,7 @@ impl<B: SelectZeroHinted + AsRef<[usize]>, const QUANTUM_LOG2: usize>
 impl<B, O, const QUANTUM_LOG2: usize> AsRef<[usize]> for QuantumZeroIndex<B, O, QUANTUM_LOG2>
 where
     B: AsRef<[usize]> + SelectZeroHinted,
-    O: VSlice,
+    O: VSlice<usize>,
 {
     fn as_ref(&self) -> &[usize] {
         self.bits.as_ref()
diff --git a/src/traits/vslice.rs b/src/traits/vslice.rs
index 9cd785d..5c2a54f 100644
--- a/src/traits/vslice.rs
+++ b/src/traits/vslice.rs
@@ -34,12 +34,13 @@ and `&[AtomicUsize]` that view their elements as values with a bit width
 equal to that of `usize`. The implementations based on atomic types implements
 [`VSliceAtomic`].
 */
+use common_traits::{Atomic, AtomicNumber, Number};
 use core::sync::atomic::{AtomicUsize, Ordering};

 const BITS: usize = core::mem::size_of::<usize>() * 8;

 /// Common methods for [`VSlice`], [`VSliceMut`], and [`VSliceAtomic`]
-pub trait VSliceCore {
+pub trait VSliceCore<Item> {
     /// Return the width of the slice. All elements stored in the slice must
     /// fit within this bit width.
     fn bit_width(&self) -> usize;
@@ -81,12 +82,12 @@ macro_rules! debug_assert_bounds {
 }

 /// A value slice.
-pub trait VSlice: VSliceCore {
+pub trait VSlice<Item>: VSliceCore<Item> {
     /// Return the value at the specified index.
     ///
     /// # Safety
     /// `index` must be in [0..[len](`VSliceCore::len`)). No bounds checking is performed.
-    unsafe fn get_unchecked(&self, index: usize) -> usize;
+    unsafe fn get_unchecked(&self, index: usize) -> Item;

     /// Return the value at the specified index.
     ///
@@ -99,7 +100,7 @@ pub trait VSlice: VSliceCore {
 }

 /// A mutable value slice.
-pub trait VSliceMut: VSlice {
+pub trait VSliceMut<Item>: VSlice<Item> {
     /// Set the element of the slice at the specified index.
     /// No bounds checking is performed.
     ///
@@ -107,18 +108,18 @@ pub trait VSliceMut: VSlice {
     /// - `index` must be in [0..[len](`VSliceCore::len`));
     /// - `value` must fit withing [`VSliceCore::bit_width`] bits.
     /// No bound or bit-width check is performed.
-    unsafe fn set_unchecked(&mut self, index: usize, value: usize);
+    unsafe fn set_unchecked(&mut self, index: usize, value: Item);

     /// Set the element of the slice at the specified index.
     ///
     ///
     /// May panic if the index is not in in [0..[len](`VSliceCore::len`))
     /// or the value does not fit in [`VSliceCore::bit_width`] bits.
-    fn set(&mut self, index: usize, value: usize) {
+    fn set(&mut self, index: usize, value: Item) {
         panic_if_out_of_bounds!(index, self.len());
         let bw = self.bit_width();
-        let mask = usize::MAX.wrapping_shr(BITS as u32 - bw as u32)
-            & !((bw as isize - 1) >> (BITS - 1)) as usize;
+        let mask = usize::MAX.wrapping_shr(Self::Item::BITS as u32 - bw as u32)
+            & !((bw as isize - 1) >> (Self::Item::BITS - 1)) as usize;
         panic_if_value!(value, mask, bw);
         unsafe {
             self.set_unchecked(index, value);
@@ -127,13 +128,13 @@ pub trait VSliceMut: VSlice {
 }

 /// A thread-safe value slice supporting atomic operations.
-pub trait VSliceAtomic: VSliceCore {
+pub trait VSliceAtomic<Item: Atomic>: VSliceCore<Item::NonAtomic> {
     /// Return the value at the specified index.
     ///
     /// # Safety
     /// `index` must be in [0..[len](`VSliceCore::len`)).
     /// No bound or bit-width check is performed.
-    unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> usize;
+    unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> Item::NonAtomic;

     /// Return the value at the specified index.
     ///
@@ -150,20 +151,20 @@ pub trait VSliceAtomic: VSliceCore {
     /// - `index` must be in [0..[len](`VSliceCore::len`));
     /// - `value` must fit withing [`VSliceCore::bit_width`] bits.
     /// No bound or bit-width check is performed.
-    unsafe fn set_unchecked(&self, index: usize, value: usize, order: Ordering);
+    unsafe fn set_unchecked(&self, index: usize, value: Item::NonAtomic, order: Ordering);

     /// Set the element of the slice at the specified index.
     ///
     ///
     /// May panic if the index is not in in [0..[len](`VSliceCore::len`))
     /// or the value does not fit in [`VSliceCore::bit_width`] bits.
-    fn set(&self, index: usize, value: usize, order: Ordering) {
+    fn set(&self, index: usize, value: Item::NonAtomic, order: Ordering) {
         if index >= self.len() {
             panic_if_out_of_bounds!(index, self.len());
         }
         let bw = self.bit_width();
-        let mask = usize::MAX.wrapping_shr(BITS as u32 - bw as u32)
-            & !((bw as isize - 1) >> (BITS - 1)) as usize;
+        let mask = usize::MAX.wrapping_shr(Self::Item::BITS as u32 - bw as u32)
+            & !((bw as isize - 1) >> (Self::Item::BITS - 1)) as usize;
         panic_if_value!(value, mask, bw);
         unsafe {
             self.set_unchecked(index, value, order);
@@ -171,10 +172,12 @@ pub trait VSliceAtomic: VSliceCore {
     }
 }

-impl<T: AsRef<[usize]>> VSliceCore for T {
+macro_rules! impl_vslice {
+    ($ty:ty) => {
+impl<T: AsRef<[$ty]>> VSliceCore<$ty> for T {
     #[inline(always)]
     fn bit_width(&self) -> usize {
-        BITS
+        std::mem::size_of::<$ty>()
     }
     #[inline(always)]
     fn len(&self) -> usize {
@@ -182,31 +185,46 @@ impl<T: AsRef<[usize]>> VSliceCore for T {
     }
 }

-impl<T: AsRef<[usize]>> VSlice for T {
+impl<T: AsRef<[$ty]>> VSlice<$ty> for T {
     #[inline(always)]
-    unsafe fn get_unchecked(&self, index: usize) -> usize {
+    unsafe fn get_unchecked(&self, index: usize) -> $ty {
         debug_assert_bounds!(index, self.len());
         *self.as_ref().get_unchecked(index)
     }
 }

-impl<T: AsRef<[AtomicUsize]> + AsRef<[usize]>> VSliceAtomic for T {
+impl<T: AsMut<[$ty]> + AsRef<[$ty]>> VSliceMut<$ty> for T {
     #[inline(always)]
-    unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> usize {
+    unsafe fn set_unchecked(&mut self, index: usize, value: $ty) {
         debug_assert_bounds!(index, self.len());
-        <T as AsRef<[AtomicUsize]>>::as_ref(self).get_unchecked(index).load(order)
+        *self.as_mut().get_unchecked_mut(index) = value;
     }
-    #[inline(always)]
-    unsafe fn set_unchecked(&self, index: usize, value: usize, order: Ordering) {
-        debug_assert_bounds!(index, self.len());
-        <T as AsRef<[AtomicUsize]>>::as_ref(self).get_unchecked(index).store(value, order);
+}
     }
 }

-impl<T: AsMut<[usize]> + AsRef<[usize]>> VSliceMut for T {
+macro_rules! impl_vslice_atomic {
+    ($ty:ty) => {
+        impl_vslice!($ty);
+
+impl<T: AsRef<[$ty]> + AsRef<[<$ty as Atomic>::NonAtomic]>> VSliceAtomic<$ty> for T {
     #[inline(always)]
-    unsafe fn set_unchecked(&mut self, index: usize, value: usize) {
+    unsafe fn get_unchecked(&self, index: usize, order: Ordering) -> <$ty as Atomic>::NonAtomic {
         debug_assert_bounds!(index, self.len());
-        *self.as_mut().get_unchecked_mut(index) = value;
+        <T as AsRef<[$ty]>>::as_ref(self)
+            .get_unchecked(index)
+            .load(order)
+    }
+    #[inline(always)]
+    unsafe fn set_unchecked(&self, index: usize, value: <$ty as Atomic>::NonAtomic, order: Ordering) {
+        debug_assert_bounds!(index, self.len());
+        <T as AsRef<[$ty]>>::as_ref(self)
+            .get_unchecked(index)
+            .store(value, order);
     }
 }
+    }
+}
+
+impl_vslice!(usize);
+impl_vslice_atomic!(AtomicUsize);

Any idea how to resolve this?

vigna commented 1 year ago

Awwwwww... that's me cleaning up the code asking Tommaso "are we using this?" and of course he says no because we aren't using it. But. Other. People. Might. 🙈

Well, one possibility is simply resurrecting the code. It should still work. Let me have a look.

BTW, I don't know if that would be OK for you, but probably it would be sufficient to implement VSlice for &[u8], since MMap implements AsRef<[u8]>.

vigna commented 1 year ago

So, from what I see you need MmapMut. Question: can't you transmute the &mut [u8] for MmapMut into a &mut [usize]? Because CompactArray is implemented for that.

Note that there was a too strong trait bound for CompactArray (Vec instead of AsRef<[usize]> + AsMut<[usize]>) that I just fixed.

vigna commented 1 year ago

Like, this works:

use std::fs::{File, OpenOptions};
use std::io::Write;
use sux::prelude::CompactArray;
use sux::prelude::*;

pub fn main() {
    let mut file = File::create("test.bin").unwrap();
    file.write_all(&[0_u8, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15])
        .unwrap();
    drop(file);
    let file = OpenOptions::new()
        .read(true)
        .write(true)
        .open("test.bin")
        .unwrap();
    let mut mmap = unsafe {
        mmap_rs::MmapOptions::new(16)
            .unwrap()
            .with_file(file, 0)
            .map_mut()
            .unwrap()
    };

    let mmap_usize = unsafe { std::slice::from_raw_parts_mut(mmap.as_mut_ptr() as *mut usize, 2) };
    let mut c = unsafe { CompactArray::from_raw_parts(mmap_usize, 4, 32) };
    for i in 0..c.len() {
        println!("{}", c.get(i));
        c.set(i, 5);
        println!("{}", c.get(i));
    }
}

You can make it healthier using bytemuck, transmute, align_to, etc.

progval commented 1 year ago

can't you transmute the &mut [u8] for MmapMut into a &mut[usize]?

I'm afraid not — it's not guaranteed to be padded so that it ends on a whole-usize boundary

vigna commented 1 year ago

Er... ahem... huh... I'm afraid you're not familiar with the code you used up to now 😂

impl VSlice for mmap_rs::MmapMut {
    #[inline(always)]
    unsafe fn get_unchecked(&self, index: usize) -> u64 {
        debug_assert!(index < self.len(), "{} {}", index, self.len());
        let ptr = (self.as_ptr() as *const u64).add(index);
        std::ptr::read(ptr)
   }
}
progval commented 1 year ago

Ah, my bad, it is padded. I'll try your suggestions, thanks

vigna commented 1 year ago

No, I mean, I didn't generate your data so I don't know but the main() I wrote took the code exactly from the implementation you were using. Either none or both work...

progval commented 1 year ago

I mean I checked my existing file again, and it does have padding.

As my Node2Type struct needs to own the mmap, I'll have to call CompactArray::from_raw_parts on every call to Node2Type::get or Node2Type::set. It looks like it would be "free" with the current CompactArray code, but do you expect it to remain so in the future?

progval commented 1 year ago

I did it by turning my Node2Type<B: VSlice> into a pair of traits (Node2Type and Node2TypeMut) and a pair of structs (one for generic AsRef<[usize]> / AsMut<[usize]> that I could probably generalize to VSlice; and one for Mmap/MmapMut), by calling CompactArray::from_raw_parts on every access: https://gitlab.softwareheritage.org/swh/devel/swh-graph/-/blob/db07cb81f722549085f0f694bdcd204378fc4d7c/rust/src/map/node2type.rs

I didn't check whether the compiler actually makes it zero-cost, though

vigna commented 1 year ago

But if you implement AsRef<[usize]> for MMap it should work, no? At that point the current VSlice implementation will pick it up.

Am I missing something?

progval commented 1 year ago

Ah yes, of course. I needed to take a step back, thanks

(and I had to introduce a newtype as both AsRef and Mmap are external to my crate, but no big deal)