// nalgebra/src/core/matrix_vec.rs

use std::ops::Deref;
use core::Scalar;
use core::dimension::{Dim, DimName, Dynamic, U1};
use core::storage::{Storage, StorageMut, Owned, ContiguousStorage, ContiguousStorageMut};
use core::allocator::Allocator;
use core::default_allocator::DefaultAllocator;
#[cfg(feature = "abomonation-serialize")]
use abomonation::Abomonation;

/*
 *
 * Storage.
 *
 */
/// A Vec-based matrix data storage. It may be dynamically-sized.
///
/// The matrix components are stored in column-major order in the underlying `Vec`.
#[repr(C)]
#[derive(Eq, Debug, Clone, PartialEq)]
#[cfg_attr(feature = "serde-serialize", derive(Serialize, Deserialize))]
pub struct MatrixVec<N, R: Dim, C: Dim> {
    data: Vec<N>,
    nrows: R,
    ncols: C
}

impl<N, R: Dim, C: Dim> MatrixVec<N, R, C> {
    /// Creates a new dynamic matrix data storage from the given vector and shape.
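    ///
    /// A minimal usage sketch; the `nalgebra::core` import paths below are an
    /// assumption and may differ between versions:
    ///
    /// ```
    /// use nalgebra::core::MatrixVec;
    /// use nalgebra::core::dimension::Dynamic;
    ///
    /// // A 3×2 column-major storage backed by six elements.
    /// let storage = MatrixVec::new(Dynamic::new(3), Dynamic::new(2), vec![1, 2, 3, 4, 5, 6]);
    /// assert_eq!(storage.len(), 6); // `Deref<Target = Vec<N>>` exposes the `Vec` API.
    /// ```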
    #[inline]
    pub fn new(nrows: R, ncols: C, data: Vec<N>) -> MatrixVec<N, R, C> {
        assert!(nrows.value() * ncols.value() == data.len(),
                "Data storage buffer dimension mismatch.");
        MatrixVec {
            data: data,
            nrows: nrows,
            ncols: ncols
        }
    }

    /// The underlying data storage.
    #[inline]
    pub fn data(&self) -> &Vec<N> {
        &self.data
    }

    /// The underlying mutable data storage.
    ///
    /// This is unsafe because modifying the vector in a way that breaks its
    /// correspondence with the matrix shape (e.g., changing its length) causes
    /// undefined behavior.
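    ///
    /// A hedged sketch of a sound use: the element is overwritten in place and
    /// the length is left untouched (import paths assumed as for `new`):
    ///
    /// ```
    /// use nalgebra::core::MatrixVec;
    /// use nalgebra::core::dimension::Dynamic;
    ///
    /// let mut storage = MatrixVec::new(Dynamic::new(2), Dynamic::new(2), vec![1, 2, 3, 4]);
    /// unsafe { storage.data_mut()[0] = 42; } // In-place write; the shape invariant holds.
    /// assert_eq!(storage[0], 42);
    /// ```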
    #[inline]
    pub unsafe fn data_mut(&mut self) -> &mut Vec<N> {
        &mut self.data
    }

    /// Resizes the underlying mutable data storage and unwraps it.
    ///
    /// If `sz` is larger than the current size, the additional elements are uninitialized.
    /// If `sz` is smaller than the current size, the excess elements are truncated.
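    ///
    /// A small illustration of the truncating case (import paths assumed as for `new`):
    ///
    /// ```
    /// use nalgebra::core::MatrixVec;
    /// use nalgebra::core::dimension::Dynamic;
    ///
    /// let storage = MatrixVec::new(Dynamic::new(2), Dynamic::new(2), vec![1, 2, 3, 4]);
    /// let shrunk = unsafe { storage.resize(2) };
    /// assert_eq!(&shrunk[..], &[1, 2]); // Only the first two elements remain.
    /// ```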
    #[inline]
    pub unsafe fn resize(mut self, sz: usize) -> Vec<N> {
        let len = self.len();

        if sz < len {
            // Shrink: shorten the logical length (the removed elements are not
            // dropped), then release the excess capacity.
            self.data.set_len(sz);
            self.data.shrink_to_fit();
        }
        else {
            // Grow: reserve exactly the missing capacity, then expose it as
            // uninitialized elements.
            self.data.reserve_exact(sz - len);
            self.data.set_len(sz);
        }

        self.data
    }
}

impl<N, R: Dim, C: Dim> Deref for MatrixVec<N, R, C> {
    type Target = Vec<N>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        &self.data
    }
}

/*
 *
 * Dynamic - Static
 * Dynamic - Dynamic
 *
 */
unsafe impl<N: Scalar, C: Dim> Storage<N, Dynamic, C> for MatrixVec<N, Dynamic, C>
    where DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self> {
    type RStride = U1;
    type CStride = Dynamic;

    #[inline]
    fn ptr(&self) -> *const N {
        self.data.as_ptr()
    }

    #[inline]
    fn shape(&self) -> (Dynamic, C) {
        (self.nrows, self.ncols)
    }

    #[inline]
    fn strides(&self) -> (Self::RStride, Self::CStride) {
        // Column-major layout: consecutive rows are adjacent in memory (row
        // stride 1) while moving to the next column skips `nrows` elements.
        (Self::RStride::name(), self.nrows)
    }

    #[inline]
    fn is_contiguous(&self) -> bool {
        true
    }

    #[inline]
    fn into_owned(self) -> Owned<N, Dynamic, C>
        where DefaultAllocator: Allocator<N, Dynamic, C> {
        self
    }

    #[inline]
    fn clone_owned(&self) -> Owned<N, Dynamic, C>
        where DefaultAllocator: Allocator<N, Dynamic, C> {
        self.clone()
    }

    #[inline]
    fn as_slice(&self) -> &[N] {
        &self[..]
    }
}

unsafe impl<N: Scalar, R: DimName> Storage<N, R, Dynamic> for MatrixVec<N, R, Dynamic>
    where DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self> {
    type RStride = U1;
    type CStride = R;

    #[inline]
    fn ptr(&self) -> *const N {
        self.data.as_ptr()
    }

    #[inline]
    fn shape(&self) -> (R, Dynamic) {
        (self.nrows, self.ncols)
    }

    #[inline]
    fn strides(&self) -> (Self::RStride, Self::CStride) {
        (Self::RStride::name(), self.nrows)
    }

    #[inline]
    fn is_contiguous(&self) -> bool {
        true
    }

    #[inline]
    fn into_owned(self) -> Owned<N, R, Dynamic>
        where DefaultAllocator: Allocator<N, R, Dynamic> {
        self
    }

    #[inline]
    fn clone_owned(&self) -> Owned<N, R, Dynamic>
        where DefaultAllocator: Allocator<N, R, Dynamic> {
        self.clone()
    }

    #[inline]
    fn as_slice(&self) -> &[N] {
        &self[..]
    }
}

/*
 *
 * StorageMut, ContiguousStorage.
 *
 */

unsafe impl<N: Scalar, C: Dim> StorageMut<N, Dynamic, C> for MatrixVec<N, Dynamic, C>
    where DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self> {
    #[inline]
    fn ptr_mut(&mut self) -> *mut N {
        self.data.as_mut_ptr()
    }

    #[inline]
    fn as_mut_slice(&mut self) -> &mut [N] {
        &mut self.data[..]
    }
}

unsafe impl<N: Scalar, C: Dim> ContiguousStorage<N, Dynamic, C> for MatrixVec<N, Dynamic, C>
    where DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self> {
}

unsafe impl<N: Scalar, C: Dim> ContiguousStorageMut<N, Dynamic, C> for MatrixVec<N, Dynamic, C>
    where DefaultAllocator: Allocator<N, Dynamic, C, Buffer = Self> {
}

unsafe impl<N: Scalar, R: DimName> StorageMut<N, R, Dynamic> for MatrixVec<N, R, Dynamic>
    where DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self> {
    #[inline]
    fn ptr_mut(&mut self) -> *mut N {
        self.data.as_mut_ptr()
    }

    #[inline]
    fn as_mut_slice(&mut self) -> &mut [N] {
        &mut self.data[..]
    }
}

#[cfg(feature = "abomonation-serialize")]
impl<N: Abomonation, R: Dim, C: Dim> Abomonation for MatrixVec<N, R, C> {
    unsafe fn entomb(&self, writer: &mut Vec<u8>) {
        self.data.entomb(writer)
    }

    unsafe fn embalm(&mut self) {
        self.data.embalm()
    }

    unsafe fn exhume<'a, 'b>(&'a mut self, bytes: &'b mut [u8]) -> Option<&'b mut [u8]> {
        self.data.exhume(bytes)
    }
}

unsafe impl<N: Scalar, R: DimName> ContiguousStorage<N, R, Dynamic> for MatrixVec<N, R, Dynamic>
    where DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self> {
}

unsafe impl<N: Scalar, R: DimName> ContiguousStorageMut<N, R, Dynamic> for MatrixVec<N, R, Dynamic>
    where DefaultAllocator: Allocator<N, R, Dynamic, Buffer = Self> {
}
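
// A hedged sanity check exercising the `Storage` impl above; the test and its
// expectations are illustrative additions, not part of the original file.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn dynamic_dynamic_storage_is_column_major() {
        let storage = MatrixVec::new(Dynamic::new(3), Dynamic::new(2), vec![1, 2, 3, 4, 5, 6]);

        // The shape reported by `Storage` matches the constructor arguments.
        let (nrows, ncols) = storage.shape();
        assert_eq!((nrows.value(), ncols.value()), (3, 2));

        // Column-major strides: rows are adjacent, columns are `nrows` apart.
        let (rstride, cstride) = storage.strides();
        assert_eq!((rstride.value(), cstride.value()), (1, 3));

        assert!(storage.is_contiguous());
        assert_eq!(storage.as_slice(), &[1, 2, 3, 4, 5, 6]);
    }
}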