use std::ops::Deref;

use core::Scalar;
use core::dimension::{Dim, DimName, Dynamic, U1};
use core::storage::{Storage, StorageMut, Owned, ContiguousStorage, ContiguousStorageMut};
use core::allocator::Allocator;
use core::default_allocator::DefaultAllocator;

#[cfg(feature = "abomonation-serialize")]
use abomonation::Abomonation;

/*
 *
 * Storage.
 *
 */
/// A Vec-based matrix data storage. It may be dynamically-sized.
#[repr(C)]
#[derive(Eq, Debug, Clone, PartialEq)]
#[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))]
pub struct MatrixVec<N, R: Dim, C: Dim> {
    data:  Vec<N>,
    nrows: R,
    ncols: C
}

impl<N, R: Dim, C: Dim> MatrixVec<N, R, C> {
    /// Creates a new dynamic matrix data storage from the given vector and shape.
    #[inline]
    pub fn new(nrows: R, ncols: C, data: Vec<N>) -> MatrixVec<N, R, C> {
        assert!(nrows.value() * ncols.value() == data.len(),
                "Data storage buffer dimension mismatch.");
        MatrixVec {
            data:  data,
            nrows: nrows,
            ncols: ncols
        }
    }

    /// The underlying data storage.
    #[inline]
    pub fn data(&self) -> &Vec<N> {
        &self.data
    }

    /// The underlying mutable data storage.
    ///
    /// This is unsafe because this may cause UB if the vector is modified by the user.
    #[inline]
    pub unsafe fn data_mut(&mut self) -> &mut Vec<N> {
        &mut self.data
    }

    /// Resizes the underlying mutable data storage and unwraps it.
    ///
    /// If `sz` is larger than the current size, the additional elements are uninitialized.
    /// If `sz` is smaller than the current size, the excess elements are truncated.
    #[inline]
    pub unsafe fn resize(mut self, sz: usize) -> Vec<N> {
        let len = self.len();

        if sz < len {
            self.data.set_len(sz);
            self.data.shrink_to_fit();
        }
        else {
            self.data.reserve_exact(sz - len);
            self.data.set_len(sz);
        }

        self.data
    }
}

impl<N, R: Dim, C: Dim> Deref for MatrixVec<N, R, C> {
    type Target = Vec<N>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.data
    }
}

/*
 *
 * Dynamic − Static
 * Dynamic − Dynamic
 *
 */
unsafe impl<N: Scalar, C: Dim> Storage<N, Dynamic, C> for MatrixVec<N, Dynamic, C>
    where DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self> {
    type RStride = U1;
    type CStride = Dynamic;

    #[inline]
    fn ptr(&self) -> *const N {
        self.data.as_ptr()
    }

    #[inline]
    fn shape(&self) -> (Dynamic, C) {
        (self.nrows, self.ncols)
    }

    #[inline]
    fn strides(&self) -> (Self::RStride, Self::CStride) {
        (Self::RStride::name(), self.nrows)
    }

    #[inline]
    fn is_contiguous(&self) -> bool {
        true
    }

    #[inline]
    fn into_owned(self) -> Owned<N, Dynamic, C>
        where DefaultAllocator: Allocator<N, Dynamic, C> {
        self
    }

    #[inline]
    fn clone_owned(&self) -> Owned<N, Dynamic, C>
        where DefaultAllocator: Allocator<N, Dynamic, C> {
        self.clone()
    }

    #[inline]
    fn as_slice(&self) -> &[N] {
        &self[..]
    }
}

unsafe impl<N: Scalar, R: DimName> Storage<N, R, Dynamic> for MatrixVec<N, R, Dynamic>
    where DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self> {
    type RStride = U1;
    type CStride = R;

    #[inline]
    fn ptr(&self) -> *const N {
        self.data.as_ptr()
    }

    #[inline]
    fn shape(&self) -> (R, Dynamic) {
        (self.nrows, self.ncols)
    }

    #[inline]
    fn strides(&self) -> (Self::RStride, Self::CStride) {
        (Self::RStride::name(), self.nrows)
    }

    #[inline]
    fn is_contiguous(&self) -> bool {
        true
    }

    #[inline]
    fn into_owned(self) -> Owned<N, R, Dynamic>
        where DefaultAllocator: Allocator<N, R, Dynamic> {
        self
    }

    #[inline]
    fn clone_owned(&self) -> Owned<N, R, Dynamic>
        where DefaultAllocator: Allocator<N, R, Dynamic> {
        self.clone()
    }

    #[inline]
    fn as_slice(&self) -> &[N] {
        &self[..]
    }
}

/*
 *
 * StorageMut, ContiguousStorage.
 *
 */
unsafe impl<N: Scalar, C: Dim> StorageMut<N, Dynamic, C> for MatrixVec<N, Dynamic, C>
    where DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self> {
    #[inline]
    fn ptr_mut(&mut self) -> *mut N {
        self.data.as_mut_ptr()
    }

    #[inline]
    fn as_mut_slice(&mut self) -> &mut [N] {
        &mut self.data[..]
    }
}
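// A minimal sketch of the column-major layout encoded by the `Storage` impls
// above: the row stride is `U1` (elements within a column are adjacent) and
// the column stride is `nrows`, so element (i, j) lives at linear index
// `i + j * nrows`. Assumes `Dynamic::new` and `Dim::value` behave as defined
// in `core::dimension`.
#[cfg(test)]
mod layout_sketch {
    use super::*;

    #[test]
    fn column_major_indexing() {
        // A 2×3 storage holding the columns [1, 2], [3, 4], [5, 6].
        let nrows = Dynamic::new(2);
        let ncols = Dynamic::new(3);
        let storage = MatrixVec::new(nrows, ncols, vec![1, 2, 3, 4, 5, 6]);

        // Element (i, j) is found at `i + j * nrows`.
        let (i, j) = (1, 2);
        assert_eq!(storage.data()[i + j * nrows.value()], 6);
    }
}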
unsafe impl<N: Scalar, C: Dim> ContiguousStorage<N, Dynamic, C> for MatrixVec<N, Dynamic, C>
    where DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self> { }

unsafe impl<N: Scalar, C: Dim> ContiguousStorageMut<N, Dynamic, C> for MatrixVec<N, Dynamic, C>
    where DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self> { }

unsafe impl<N: Scalar, R: DimName> StorageMut<N, R, Dynamic> for MatrixVec<N, R, Dynamic>
    where DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self> {
    #[inline]
    fn ptr_mut(&mut self) -> *mut N {
        self.data.as_mut_ptr()
    }

    #[inline]
    fn as_mut_slice(&mut self) -> &mut [N] {
        &mut self.data[..]
    }
}

#[cfg(feature = "abomonation-serialize")]
impl<N: Abomonation, R: Dim, C: Dim> Abomonation for MatrixVec<N, R, C> {
    unsafe fn entomb(&self, writer: &mut Vec<u8>) {
        self.data.entomb(writer)
    }

    unsafe fn embalm(&mut self) {
        self.data.embalm()
    }

    unsafe fn exhume<'a, 'b>(&'a mut self, bytes: &'b mut [u8]) -> Option<&'b mut [u8]> {
        self.data.exhume(bytes)
    }
}

unsafe impl<N: Scalar, R: DimName> ContiguousStorage<N, R, Dynamic> for MatrixVec<N, R, Dynamic>
    where DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self> { }

unsafe impl<N: Scalar, R: DimName> ContiguousStorageMut<N, R, Dynamic> for MatrixVec<N, R, Dynamic>
    where DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self> { }
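// A minimal sketch exercising the documented contract of `resize` above:
// shrinking truncates the buffer, while growing leaves the new tail
// uninitialized. Both tests stay within that contract by never reading past
// the initialized prefix; only items defined in this module are assumed.
#[cfg(test)]
mod resize_sketch {
    use super::*;

    #[test]
    fn shrinking_truncates() {
        let storage = MatrixVec::new(Dynamic::new(2), Dynamic::new(2), vec![1, 2, 3, 4]);
        // The storage is consumed; only the truncated, initialized prefix remains.
        let shrunk = unsafe { storage.resize(2) };
        assert_eq!(&shrunk[..], &[1, 2]);
    }

    #[test]
    fn growing_leaves_tail_uninitialized() {
        let storage = MatrixVec::new(Dynamic::new(2), Dynamic::new(2), vec![1, 2, 3, 4]);
        let grown = unsafe { storage.resize(6) };
        // Length grows to `sz`, but only the original four elements may be read.
        assert_eq!(grown.len(), 6);
        assert_eq!(&grown[..4], &[1, 2, 3, 4]);
    }
}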