use std::fmt::{self, Debug, Formatter}; use std::hash::{Hash, Hasher}; #[cfg(feature = "abomonation-serialize")] use std::io::{Result as IOResult, Write}; use std::ops::{Deref, DerefMut, Mul}; #[cfg(feature = "serde-serialize")] use serde::de::{Error, SeqAccess, Visitor}; #[cfg(feature = "serde-serialize")] use serde::ser::SerializeSeq; #[cfg(feature = "serde-serialize")] use serde::{Deserialize, Deserializer, Serialize, Serializer}; #[cfg(feature = "serde-serialize")] use std::marker::PhantomData; #[cfg(feature = "serde-serialize")] use std::mem; #[cfg(feature = "abomonation-serialize")] use abomonation::Abomonation; use generic_array::{ArrayLength, GenericArray}; use typenum::Prod; use crate::base::allocator::Allocator; use crate::base::default_allocator::DefaultAllocator; use crate::base::dimension::{DimName, U1}; use crate::base::storage::{ ContiguousStorage, ContiguousStorageMut, Owned, ReshapableStorage, Storage, StorageMut, }; use crate::base::Scalar; /* * * Static Storage. * */ /// A array-based statically sized matrix data storage. #[repr(C)] pub struct ArrayStorage where R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, { data: GenericArray>, } #[deprecated(note = "renamed to `ArrayStorage`")] /// Renamed to [ArrayStorage]. 
pub type MatrixArray = ArrayStorage; impl Default for ArrayStorage where R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, N: Default, { fn default() -> Self { ArrayStorage { data: Default::default(), } } } impl Hash for ArrayStorage where N: Hash, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, { fn hash(&self, state: &mut H) { self.data[..].hash(state) } } impl Deref for ArrayStorage where R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, { type Target = GenericArray>; #[inline] fn deref(&self) -> &Self::Target { &self.data } } impl DerefMut for ArrayStorage where R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, { #[inline] fn deref_mut(&mut self) -> &mut Self::Target { &mut self.data } } impl Debug for ArrayStorage where N: Debug, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, { #[inline] fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { self.data.fmt(fmt) } } impl Copy for ArrayStorage where N: Copy, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, GenericArray>: Copy, { } impl Clone for ArrayStorage where N: Clone, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, { #[inline] fn clone(&self) -> Self { ArrayStorage { data: self.data.clone(), } } } impl Eq for ArrayStorage where N: Eq, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, { } impl PartialEq for ArrayStorage where N: PartialEq, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, { #[inline] fn eq(&self, right: &Self) -> bool { self.data == right.data } } unsafe impl Storage for ArrayStorage where N: Scalar, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, DefaultAllocator: Allocator, { type RStride = U1; type CStride = R; #[inline] fn ptr(&self) -> *const N { self[..].as_ptr() } #[inline] fn shape(&self) -> (R, C) { (R::name(), C::name()) } #[inline] fn strides(&self) -> (Self::RStride, Self::CStride) { (Self::RStride::name(), Self::CStride::name()) } #[inline] fn is_contiguous(&self) -> bool { true } #[inline] fn 
into_owned(self) -> Owned where DefaultAllocator: Allocator, { self } #[inline] fn clone_owned(&self) -> Owned where DefaultAllocator: Allocator, { let it = self.iter().cloned(); DefaultAllocator::allocate_from_iterator(self.shape().0, self.shape().1, it) } #[inline] fn as_slice(&self) -> &[N] { &self[..] } } unsafe impl StorageMut for ArrayStorage where N: Scalar, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, DefaultAllocator: Allocator, { #[inline] fn ptr_mut(&mut self) -> *mut N { self[..].as_mut_ptr() } #[inline] fn as_mut_slice(&mut self) -> &mut [N] { &mut self[..] } } unsafe impl ContiguousStorage for ArrayStorage where N: Scalar, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, DefaultAllocator: Allocator, { } unsafe impl ContiguousStorageMut for ArrayStorage where N: Scalar, R: DimName, C: DimName, R::Value: Mul, Prod: ArrayLength, DefaultAllocator: Allocator, { } impl ReshapableStorage for ArrayStorage where N: Scalar, R1: DimName, C1: DimName, R1::Value: Mul, Prod: ArrayLength, R2: DimName, C2: DimName, R2::Value: Mul>, Prod: ArrayLength, { type Output = ArrayStorage; fn reshape_generic(self, _: R2, _: C2) -> Self::Output { ArrayStorage { data: self.data } } } /* * * Allocation-less serde impls. * */ // XXX: open an issue for GenericArray so that it implements serde traits? 
#[cfg(feature = "serde-serialize")]
impl<N, R, C> Serialize for ArrayStorage<N, R, C>
where
    N: Scalar + Serialize,
    R: DimName,
    C: DimName,
    R::Value: Mul<C::Value>,
    Prod<R::Value, C::Value>: ArrayLength<N>,
{
    /// Serializes the storage as a flat sequence of `R * C` elements in
    /// column-major order, without any intermediate allocation.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let mut serializer = serializer.serialize_seq(Some(R::dim() * C::dim()))?;

        for e in self.iter() {
            serializer.serialize_element(e)?;
        }

        serializer.end()
    }
}

#[cfg(feature = "serde-serialize")]
impl<'a, N, R, C> Deserialize<'a> for ArrayStorage<N, R, C>
where
    N: Scalar + Deserialize<'a>,
    R: DimName,
    C: DimName,
    R::Value: Mul<C::Value>,
    Prod<R::Value, C::Value>: ArrayLength<N>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'a>,
    {
        deserializer.deserialize_seq(ArrayStorageVisitor::new())
    }
}

#[cfg(feature = "serde-serialize")]
/// A visitor that produces a matrix array.
struct ArrayStorageVisitor<N, R, C> {
    marker: PhantomData<(N, R, C)>,
}

#[cfg(feature = "serde-serialize")]
impl<N, R, C> ArrayStorageVisitor<N, R, C>
where
    N: Scalar,
    R: DimName,
    C: DimName,
    R::Value: Mul<C::Value>,
    Prod<R::Value, C::Value>: ArrayLength<N>,
{
    /// Construct a new sequence visitor.
    pub fn new() -> Self {
        ArrayStorageVisitor {
            marker: PhantomData,
        }
    }
}

#[cfg(feature = "serde-serialize")]
impl<'a, N, R, C> Visitor<'a> for ArrayStorageVisitor<N, R, C>
where
    N: Scalar + Deserialize<'a>,
    R: DimName,
    C: DimName,
    R::Value: Mul<C::Value>,
    Prod<R::Value, C::Value>: ArrayLength<N>,
{
    type Value = ArrayStorage<N, R, C>;

    fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
        formatter.write_str("a matrix array")
    }

    /// Fills the storage element by element, rejecting sequences whose
    /// length is not exactly `R * C`.
    #[inline]
    fn visit_seq<V>(self, mut visitor: V) -> Result<ArrayStorage<N, R, C>, V::Error>
    where
        V: SeqAccess<'a>,
    {
        // Build the array inside `MaybeUninit` and only `assume_init` once
        // every element has been written. The previous code did
        // `MaybeUninit::uninit().assume_init()` up front, which is undefined
        // behavior for any `N` with invalid bit patterns or a `Drop` impl
        // (partially-written garbage would be dropped on the error path).
        let mut out: mem::MaybeUninit<ArrayStorage<N, R, C>> = mem::MaybeUninit::uninit();
        // SAFETY of the cast: `ArrayStorage` is `#[repr(C)]` with a single
        // `GenericArray` field, i.e. `R * C` contiguous `N` values.
        let ptr = out.as_mut_ptr() as *mut N;
        let len = R::dim() * C::dim();

        let mut curr = 0;
        while let Some(value) = visitor.next_element()? {
            if curr >= len {
                // Same error the old `get_mut(..).ok_or_else(..)` produced.
                return Err(V::Error::invalid_length(curr, &self));
            }
            // SAFETY: `curr < len`, so the write stays inside the array.
            // NOTE(review): on a later error, already-written elements leak
            // (are never dropped) — safe, if not ideal.
            unsafe { ptr.add(curr).write(value) };
            curr += 1;
        }

        if curr == len {
            // SAFETY: all `len` elements were initialized above.
            Ok(unsafe { out.assume_init() })
        } else {
            Err(V::Error::invalid_length(curr, &self))
        }
    }
}

#[cfg(feature = "bytemuck")]
unsafe impl<N: Scalar + bytemuck::Zeroable, R: DimName, C: DimName> bytemuck::Zeroable
    for ArrayStorage<N, R, C>
where
    R::Value: Mul<C::Value>,
    Prod<R::Value, C::Value>: ArrayLength<N>,
    Self: Copy,
{
}

#[cfg(feature = "bytemuck")]
unsafe impl<N: Scalar + bytemuck::Pod, R: DimName, C: DimName> bytemuck::Pod
    for ArrayStorage<N, R, C>
where
    R::Value: Mul<C::Value>,
    Prod<R::Value, C::Value>: ArrayLength<N>,
    Self: Copy,
{
}

#[cfg(feature = "abomonation-serialize")]
impl<N, R, C> Abomonation for ArrayStorage<N, R, C>
where
    R: DimName,
    C: DimName,
    R::Value: Mul<C::Value>,
    Prod<R::Value, C::Value>: ArrayLength<N>,
    N: Abomonation,
{
    /// Writes every element's out-of-line data, in order.
    unsafe fn entomb<W: Write>(&self, writer: &mut W) -> IOResult<()> {
        for element in self.data.as_slice() {
            element.entomb(writer)?;
        }
        Ok(())
    }

    /// Restores every element from `bytes`, threading the remaining
    /// byte slice through; `None` aborts on the first failed element.
    unsafe fn exhume<'a, 'b>(&'a mut self, mut bytes: &'b mut [u8]) -> Option<&'b mut [u8]> {
        for element in self.data.as_mut_slice() {
            let temp = bytes;
            bytes = if let Some(remainder) = element.exhume(temp) {
                remainder
            } else {
                return None;
            }
        }
        Some(bytes)
    }

    /// Total out-of-line size: the sum of each element's extent.
    fn extent(&self) -> usize {
        self.data
            .as_slice()
            .iter()
            .fold(0, |acc, e| acc + e.extent())
    }
}