//! The default matrix data storage allocator.
//!
//! This will use stack-allocated buffers for matrices with dimensions known at compile-time, and
//! heap-allocated buffers for matrices with at least one dimension unknown at compile-time.

use std::cmp;
use std::fmt;
use std::mem::{self, ManuallyDrop, MaybeUninit};
use std::ptr;

#[cfg(all(feature = "alloc", not(feature = "std")))]
use alloc::vec::Vec;

#[cfg(any(feature = "alloc", feature = "std"))]
use crate::base::dimension::Dynamic;

use super::Const;
use crate::base::allocator::{Allocator, InnerAllocator, Reallocator};
use crate::base::array_storage::ArrayStorage;
use crate::base::dimension::{Dim, DimName};
use crate::base::storage::{
    ContiguousStorage, ContiguousStorageMut, InnerOwned, Storage, StorageMut,
};
use crate::base::vec_storage::VecStorage;
use crate::U1;

/*
 *
 * Allocator.
 *
 */
/// A helper struct that controls how the storage for a matrix should be allocated.
///
/// This struct is useless on its own. Instead, it's used in trait bounds of the form
/// `DefaultAllocator: Allocator<T, R, C>` to select the storage type for a given matrix shape.
///
/// It allocates an [`ArrayStorage`] for statically-sized matrices and a [`VecStorage`] for
/// dynamically-sized ones.
pub struct DefaultAllocator;

// Static - Static
impl<T, const R: usize, const C: usize> InnerAllocator<T, Const<R>, Const<C>>
    for DefaultAllocator
{
    type Buffer = ArrayStorage<T, R, C>;

    #[inline]
    fn allocate_from_iterator<I: IntoIterator<Item = T>>(
        nrows: Const<R>,
        ncols: Const<C>,
        iter: I,
    ) -> Self::Buffer {
        let mut res = Self::allocate_uninitialized(nrows, ncols);
        let mut count = 0;

        for (res, e) in res.as_mut_slice().iter_mut().zip(iter.into_iter()) {
            *res = MaybeUninit::new(e);
            count += 1;
        }

        assert!(
            count == nrows.value() * ncols.value(),
            "Matrix init. from iterator: iterator not long enough."
        );

        // Safety: we have initialized all entries.
        unsafe { <Self as Allocator<T, Const<R>, Const<C>>>::assume_init(res) }
    }
}
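// NOTE: an illustrative sketch, not part of the original allocator code. Constructing a
// statically-sized matrix from an iterator goes through `allocate_from_iterator` above, which
// panics if the iterator yields fewer than `R * C` elements. This assumes the crate-root
// re-export of the `Matrix2x3` alias and its `from_iterator` constructor.
#[cfg(test)]
mod static_alloc_sketch {
    #[test]
    fn from_iterator_fills_column_major() {
        // `from_iterator` consumes exactly 2 * 3 = 6 elements, in column-major order.
        let m = crate::Matrix2x3::from_iterator(1..=6);
        assert_eq!(m[(0, 0)], 1);
        assert_eq!(m[(1, 0)], 2);
        assert_eq!(m[(0, 2)], 5);
    }
}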
impl<T, const R: usize, const C: usize> Allocator<T, Const<R>, Const<C>> for DefaultAllocator {
    #[inline]
    fn allocate_uninitialized(
        _: Const<R>,
        _: Const<C>,
    ) -> InnerOwned<MaybeUninit<T>, Const<R>, Const<C>> {
        // SAFETY: An uninitialized `[MaybeUninit<_>; _]` is valid.
        let array = unsafe { MaybeUninit::uninit().assume_init() };
        ArrayStorage(array)
    }

    #[inline]
    unsafe fn assume_init(
        uninit: <Self as InnerAllocator<MaybeUninit<T>, Const<R>, Const<C>>>::Buffer,
    ) -> InnerOwned<T, Const<R>, Const<C>> {
        // Safety:
        // * The caller guarantees that all elements of the array are initialized
        // * `MaybeUninit<T>` and `T` are guaranteed to have the same layout
        // * `MaybeUninit` does not drop, so there are no double-frees
        // And thus the conversion is safe
        ArrayStorage((&uninit as *const _ as *const [_; C]).read())
    }

    /// Specifies that a given buffer's entries should be manually dropped.
    #[inline]
    fn manually_drop(
        buf: <Self as InnerAllocator<T, Const<R>, Const<C>>>::Buffer,
    ) -> <Self as InnerAllocator<ManuallyDrop<T>, Const<R>, Const<C>>>::Buffer {
        // SAFETY:
        // * `ManuallyDrop<T>` and `T` are guaranteed to have the same layout
        // * `ManuallyDrop` does not drop, so there are no double-frees
        // And thus the conversion is safe
        unsafe { ArrayStorage((&ManuallyDrop::new(buf) as *const _ as *const [_; C]).read()) }
    }
}

// Dynamic - Static
// Dynamic - Dynamic
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, C: Dim> InnerAllocator<T, Dynamic, C> for DefaultAllocator {
    type Buffer = VecStorage<T, Dynamic, C>;

    #[inline]
    fn allocate_from_iterator<I: IntoIterator<Item = T>>(
        nrows: Dynamic,
        ncols: C,
        iter: I,
    ) -> Self::Buffer {
        let it = iter.into_iter();
        let res: Vec<T> = it.collect();
        assert!(
            res.len() == nrows.value() * ncols.value(),
            "Allocation from iterator error: the iterator did not yield the correct number of elements."
        );

        VecStorage::new(nrows, ncols, res)
    }
}

#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, C: Dim> Allocator<T, Dynamic, C> for DefaultAllocator {
    #[inline]
    fn allocate_uninitialized(nrows: Dynamic, ncols: C) -> InnerOwned<MaybeUninit<T>, Dynamic, C> {
        let mut data = Vec::new();
        let length = nrows.value() * ncols.value();
        data.reserve_exact(length);
        data.resize_with(length, MaybeUninit::uninit);

        VecStorage::new(nrows, ncols, data)
    }

    #[inline]
    unsafe fn assume_init(
        uninit: InnerOwned<MaybeUninit<T>, Dynamic, C>,
    ) -> InnerOwned<T, Dynamic, C> {
        // Avoids a double-drop.
        let (nrows, ncols) = uninit.shape();
        let vec: Vec<_> = uninit.into();
        let mut md = ManuallyDrop::new(vec);

        // Safety:
        // - `MaybeUninit<T>` has the same alignment and layout as `T`.
        // - The length and capacity come from a valid vector.
        let new_data = Vec::from_raw_parts(md.as_mut_ptr() as *mut _, md.len(), md.capacity());

        VecStorage::new(nrows, ncols, new_data)
    }

    #[inline]
    fn manually_drop(
        buf: <Self as InnerAllocator<T, Dynamic, C>>::Buffer,
    ) -> <Self as InnerAllocator<ManuallyDrop<T>, Dynamic, C>>::Buffer {
        // Avoids a double-drop.
        let (nrows, ncols) = buf.shape();
        let vec: Vec<_> = buf.into();
        let mut md = ManuallyDrop::new(vec);

        // Safety:
        // - `ManuallyDrop<T>` has the same alignment and layout as `T`.
        // - The length and capacity come from a valid vector.
        let new_data =
            unsafe { Vec::from_raw_parts(md.as_mut_ptr() as *mut _, md.len(), md.capacity()) };

        VecStorage::new(nrows, ncols, new_data)
    }
}

// Static - Dynamic
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, R: DimName> InnerAllocator<T, R, Dynamic> for DefaultAllocator {
    type Buffer = VecStorage<T, R, Dynamic>;

    #[inline]
    fn allocate_from_iterator<I: IntoIterator<Item = T>>(
        nrows: R,
        ncols: Dynamic,
        iter: I,
    ) -> InnerOwned<T, R, Dynamic> {
        let it = iter.into_iter();
        let res: Vec<T> = it.collect();
        assert!(
            res.len() == nrows.value() * ncols.value(),
            "Allocation from iterator error: the iterator did not yield the correct number of elements."
        );

        VecStorage::new(nrows, ncols, res)
    }
}

#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, R: DimName> Allocator<T, R, Dynamic> for DefaultAllocator {
    #[inline]
    fn allocate_uninitialized(nrows: R, ncols: Dynamic) -> InnerOwned<MaybeUninit<T>, R, Dynamic> {
        let mut data = Vec::new();
        let length = nrows.value() * ncols.value();
        data.reserve_exact(length);
        data.resize_with(length, MaybeUninit::uninit);

        VecStorage::new(nrows, ncols, data)
    }

    #[inline]
    unsafe fn assume_init(
        uninit: InnerOwned<MaybeUninit<T>, R, Dynamic>,
    ) -> InnerOwned<T, R, Dynamic> {
        // Avoids a double-drop.
        let (nrows, ncols) = uninit.shape();
        let vec: Vec<_> = uninit.into();
        let mut md = ManuallyDrop::new(vec);

        // Safety:
        // - `MaybeUninit<T>` has the same alignment and layout as `T`.
        // - The length and capacity come from a valid vector.
        let new_data = Vec::from_raw_parts(md.as_mut_ptr() as *mut _, md.len(), md.capacity());

        VecStorage::new(nrows, ncols, new_data)
    }

    #[inline]
    fn manually_drop(
        buf: <Self as InnerAllocator<T, R, Dynamic>>::Buffer,
    ) -> <Self as InnerAllocator<ManuallyDrop<T>, R, Dynamic>>::Buffer {
        // Avoids a double-drop.
        let (nrows, ncols) = buf.shape();
        let vec: Vec<_> = buf.into();
        let mut md = ManuallyDrop::new(vec);

        // Safety:
        // - `ManuallyDrop<T>` has the same alignment and layout as `T`.
        // - The length and capacity come from a valid vector.
        let new_data =
            unsafe { Vec::from_raw_parts(md.as_mut_ptr() as *mut _, md.len(), md.capacity()) };

        VecStorage::new(nrows, ncols, new_data)
    }
}
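// NOTE: an illustrative sketch, not part of the original allocator code. Dynamically-sized
// matrices take the `VecStorage` path above, and `allocate_from_iterator` asserts that the
// iterator yields exactly `nrows * ncols` elements. This assumes the crate-root re-export
// of the `DMatrix` alias.
#[cfg(test)]
mod dynamic_alloc_sketch {
    #[test]
    fn from_iterator_uses_exact_length() {
        let m = crate::DMatrix::from_iterator(2, 3, 1..=6);
        assert_eq!(m.shape(), (2, 3));
        // Same column-major layout as the static case.
        assert_eq!(m[(1, 1)], 4);
    }
}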
/// The owned storage type for a matrix.
#[repr(transparent)]
pub struct Owned<T, R: Dim, C: Dim>(pub InnerOwned<T, R, C>)
where
    DefaultAllocator: Allocator<T, R, C>;

impl<T: Copy, R: Dim, C: Dim> Copy for Owned<T, R, C>
where
    DefaultAllocator: Allocator<T, R, C>,
    InnerOwned<T, R, C>: Copy,
{
}

impl<T: Clone, R: Dim, C: Dim> Clone for Owned<T, R, C>
where
    DefaultAllocator: Allocator<T, R, C>,
{
    fn clone(&self) -> Self {
        if Self::is_array() {
            // We first clone the data.
            let slice = unsafe { self.as_slice_unchecked() };
            let vec = ManuallyDrop::new(slice.to_owned());

            // We then transmute it back into an array and then an `Owned`.
            unsafe { mem::transmute_copy(&*vec.as_ptr()) }

            // TODO: check that the auxiliary copy is elided.
        } else {
            // We first clone the data.
            let clone = ManuallyDrop::new(self.as_vec_storage().clone());

            // We then transmute it back into an `Owned`.
            unsafe { mem::transmute_copy(&clone) }

            // TODO: check that the auxiliary copy is elided.
        }
    }
}

impl<T: fmt::Debug, R: Dim, C: Dim> fmt::Debug for Owned<T, R, C>
where
    DefaultAllocator: Allocator<T, R, C>,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if Self::is_array() {
            let slice = unsafe { self.as_slice_unchecked() };
            slice.fmt(f)
        } else {
            self.as_vec_storage().fmt(f)
        }
    }
}

impl<T, R: Dim, C: Dim> Owned<T, R, C>
where
    DefaultAllocator: Allocator<T, R, C>,
{
    /// Returns whether `Self` stores an [`ArrayStorage`].
    fn is_array() -> bool {
        R::is_static() && C::is_static()
    }

    /// Returns whether `Self` stores a [`VecStorage`].
    fn is_vec() -> bool {
        !Self::is_array()
    }

    /// Returns the underlying [`VecStorage`]. Does not do any sort of static
    /// type checking.
    ///
    /// # Panics
    /// This method will panic if `Self` does not contain a [`VecStorage`].
    fn as_vec_storage(&self) -> &VecStorage<T, R, C> {
        assert!(Self::is_vec());

        // Safety: `self` is transparent and must contain a `VecStorage`.
        unsafe { &*(self as *const _ as *const _) }
    }
}

unsafe impl<T, R: Dim, C: Dim> Storage<T, R, C> for Owned<T, R, C>
where
    DefaultAllocator: Allocator<T, R, C>,
{
    type RStride = U1;
    type CStride = R;

    fn ptr(&self) -> *const T {
        if Self::is_array() {
            // Thanks to `#[repr(transparent)]`, a pointer to the buffer is also a pointer
            // to its first element.
            self as *const _ as *const T
        } else {
            self.as_vec_storage().as_vec().as_ptr()
        }
    }

    fn shape(&self) -> (R, C) {
        if Self::is_array() {
            (R::default(), C::default())
        } else {
            let vec = self.as_vec_storage();
            (vec.nrows, vec.ncols)
        }
    }

    fn strides(&self) -> (Self::RStride, Self::CStride) {
        if Self::is_array() {
            (U1::name(), R::default())
        } else {
            let vec = self.as_vec_storage();
            (U1::name(), vec.nrows)
        }
    }

    fn is_contiguous(&self) -> bool {
        true
    }

    unsafe fn as_slice_unchecked(&self) -> &[T] {
        if Self::is_array() {
            std::slice::from_raw_parts(
                self.ptr(),
                R::try_to_usize().unwrap() * C::try_to_usize().unwrap(),
            )
        } else {
            self.as_vec_storage().as_vec().as_ref()
        }
    }

    fn into_owned(self) -> Owned<T, R, C> {
        self
    }

    fn clone_owned(&self) -> Owned<T, R, C>
    where
        T: Clone,
    {
        self.clone()
    }
}

unsafe impl<T, R: Dim, C: Dim> StorageMut<T, R, C> for Owned<T, R, C>
where
    DefaultAllocator: Allocator<T, R, C>,
{
    fn ptr_mut(&mut self) -> *mut T {
        // Mirrors `Storage::ptr` for the mutable case.
        if Self::is_array() {
            self as *mut _ as *mut T
        } else {
            // Safety: `self` is transparent and must contain a `VecStorage`, and taking a
            // pointer to its elements cannot change the vector's length.
            unsafe {
                let vec: &mut VecStorage<T, R, C> = &mut *(self as *mut _ as *mut _);
                vec.as_vec_mut().as_mut_ptr()
            }
        }
    }

    unsafe fn as_mut_slice_unchecked(&mut self) -> &mut [T] {
        // Mirrors `Storage::as_slice_unchecked` for the mutable case.
        let (nrows, ncols) = self.shape();
        std::slice::from_raw_parts_mut(self.ptr_mut(), nrows.value() * ncols.value())
    }
}

unsafe impl<T, R: Dim, C: Dim> ContiguousStorage<T, R, C> for Owned<T, R, C> where
    DefaultAllocator: Allocator<T, R, C>
{
}

unsafe impl<T, R: Dim, C: Dim> ContiguousStorageMut<T, R, C> for Owned<T, R, C> where
    DefaultAllocator: Allocator<T, R, C>
{
}
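// NOTE: an illustrative sketch, not part of the original allocator code. `Owned` is
// `#[repr(transparent)]`, so it has exactly the layout of the `InnerOwned` buffer it wraps;
// that is what justifies the pointer casts in `as_vec_storage` and the `transmute_copy`
// calls in `Clone::clone`. The assertion below merely illustrates that invariant.
#[cfg(test)]
mod owned_layout_sketch {
    use super::Owned;
    use crate::base::dimension::Dynamic;
    use crate::base::vec_storage::VecStorage;

    #[test]
    fn owned_has_the_layout_of_its_buffer() {
        assert_eq!(
            std::mem::size_of::<Owned<f64, Dynamic, Dynamic>>(),
            std::mem::size_of::<VecStorage<f64, Dynamic, Dynamic>>(),
        );
    }
}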
/*
 *
 * Reallocator.
 *
 */
// Anything -> Static × Static
impl<T, RFrom: Dim, CFrom: Dim, const RTO: usize, const CTO: usize>
    Reallocator<T, RFrom, CFrom, Const<RTO>, Const<CTO>> for DefaultAllocator
where
    Self: Allocator<T, RFrom, CFrom>,
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: Const<RTO>,
        cto: Const<CTO>,
        buf: InnerOwned<T, RFrom, CFrom>,
    ) -> ArrayStorage<T, RTO, CTO> {
        let mut res =
            <Self as Allocator<T, Const<RTO>, Const<CTO>>>::allocate_uninitialized(rto, cto);

        let (rfrom, cfrom) = buf.shape();

        let len_from = rfrom.value() * cfrom.value();
        let len_to = rto.value() * cto.value();
        ptr::copy_nonoverlapping(buf.ptr(), res.ptr_mut() as *mut T, cmp::min(len_from, len_to));

        // Safety: TODO: only the first `min(len_from, len_to)` entries of `res` have been
        // initialized by the copy above; the caller must initialize the rest before reading them.
        <Self as Allocator<T, Const<RTO>, Const<CTO>>>::assume_init(res)
    }
}

// Static × Static -> Dynamic × Any
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, CTo, const RFROM: usize, const CFROM: usize>
    Reallocator<T, Const<RFROM>, Const<CFROM>, Dynamic, CTo> for DefaultAllocator
where
    CTo: Dim,
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: Dynamic,
        cto: CTo,
        buf: ArrayStorage<T, RFROM, CFROM>,
    ) -> VecStorage<T, Dynamic, CTo> {
        let mut res = <Self as Allocator<T, Dynamic, CTo>>::allocate_uninitialized(rto, cto);

        let (rfrom, cfrom) = buf.shape();

        let len_from = rfrom.value() * cfrom.value();
        let len_to = rto.value() * cto.value();
        ptr::copy_nonoverlapping(buf.ptr(), res.ptr_mut() as *mut T, cmp::min(len_from, len_to));

        <Self as Allocator<T, Dynamic, CTo>>::assume_init(res)
    }
}

// Static × Static -> Static × Dynamic
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, RTo, const RFROM: usize, const CFROM: usize>
    Reallocator<T, Const<RFROM>, Const<CFROM>, RTo, Dynamic> for DefaultAllocator
where
    RTo: DimName,
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: RTo,
        cto: Dynamic,
        buf: ArrayStorage<T, RFROM, CFROM>,
    ) -> VecStorage<T, RTo, Dynamic> {
        let mut res = <Self as Allocator<T, RTo, Dynamic>>::allocate_uninitialized(rto, cto);

        let (rfrom, cfrom) = buf.shape();

        let len_from = rfrom.value() * cfrom.value();
        let len_to = rto.value() * cto.value();
        ptr::copy_nonoverlapping(buf.ptr(), res.ptr_mut() as *mut T, cmp::min(len_from, len_to));

        <Self as Allocator<T, RTo, Dynamic>>::assume_init(res)
    }
}

// All conversions from a dynamic buffer to a dynamic buffer.
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, CFrom: Dim, CTo: Dim> Reallocator<T, Dynamic, CFrom, Dynamic, CTo> for DefaultAllocator {
    #[inline]
    unsafe fn reallocate_copy(
        rto: Dynamic,
        cto: CTo,
        buf: VecStorage<T, Dynamic, CFrom>,
    ) -> VecStorage<T, Dynamic, CTo> {
        let new_buf = buf.resize(rto.value() * cto.value());
        VecStorage::new(rto, cto, new_buf)
    }
}

#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, CFrom: Dim, RTo: DimName> Reallocator<T, Dynamic, CFrom, RTo, Dynamic>
    for DefaultAllocator
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: RTo,
        cto: Dynamic,
        buf: VecStorage<T, Dynamic, CFrom>,
    ) -> VecStorage<T, RTo, Dynamic> {
        let new_buf = buf.resize(rto.value() * cto.value());
        VecStorage::new(rto, cto, new_buf)
    }
}

#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, RFrom: DimName, CTo: Dim> Reallocator<T, RFrom, Dynamic, Dynamic, CTo>
    for DefaultAllocator
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: Dynamic,
        cto: CTo,
        buf: VecStorage<T, RFrom, Dynamic>,
    ) -> VecStorage<T, Dynamic, CTo> {
        let new_buf = buf.resize(rto.value() * cto.value());
        VecStorage::new(rto, cto, new_buf)
    }
}

#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, RFrom: DimName, RTo: DimName> Reallocator<T, RFrom, Dynamic, RTo, Dynamic>
    for DefaultAllocator
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: RTo,
        cto: Dynamic,
        buf: VecStorage<T, RFrom, Dynamic>,
    ) -> VecStorage<T, RTo, Dynamic> {
        let new_buf = buf.resize(rto.value() * cto.value());
        VecStorage::new(rto, cto, new_buf)
    }
}
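// NOTE: an illustrative sketch, not part of the original allocator code. Resizing through the
// public API is what ultimately exercises `reallocate_copy`: overlapping entries are preserved
// and new entries are filled with the provided value. This assumes the crate-root `DMatrix`
// alias and its `from_element`/`resize` methods.
#[cfg(test)]
mod reallocator_sketch {
    #[test]
    fn resize_preserves_overlapping_entries() {
        let m = crate::DMatrix::from_element(2, 2, 1.0f64);
        let r = m.resize(3, 3, 0.0);
        assert_eq!(r.shape(), (3, 3));
        assert_eq!(r[(0, 0)], 1.0);
        assert_eq!(r[(2, 2)], 0.0);
    }
}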