//! The default matrix data storage allocator.
//!
//! This will use stack-allocated buffers for matrices with dimensions known at compile-time, and
//! heap-allocated buffers for matrices with at least one dimension unknown at compile-time.
2017-08-03 01:37:44 +08:00
|
|
|
|
use std::cmp;
|
2021-07-20 07:00:40 +08:00
|
|
|
|
use std::fmt;
|
|
|
|
|
use std::mem::{self, ManuallyDrop, MaybeUninit};
|
2018-05-19 23:15:15 +08:00
|
|
|
|
use std::ptr;
|
2016-12-05 05:44:42 +08:00
|
|
|
|
|
2018-05-24 23:17:34 +08:00
|
|
|
|
#[cfg(all(feature = "alloc", not(feature = "std")))]
|
2018-07-20 22:10:12 +08:00
|
|
|
|
use alloc::vec::Vec;
|
2018-05-24 23:17:34 +08:00
|
|
|
|
|
2021-07-20 07:00:40 +08:00
|
|
|
|
#[cfg(any(feature = "alloc", feature = "std"))]
|
|
|
|
|
use crate::base::dimension::Dynamic;
|
|
|
|
|
|
2021-01-03 22:20:34 +08:00
|
|
|
|
use super::Const;
|
2021-07-15 06:21:22 +08:00
|
|
|
|
use crate::base::allocator::{Allocator, InnerAllocator, Reallocator};
|
2020-04-06 00:49:48 +08:00
|
|
|
|
use crate::base::array_storage::ArrayStorage;
|
2019-03-23 21:29:07 +08:00
|
|
|
|
use crate::base::dimension::{Dim, DimName};
|
2021-07-20 07:00:40 +08:00
|
|
|
|
use crate::base::storage::{
|
|
|
|
|
ContiguousStorage, ContiguousStorageMut, InnerOwned, Storage, StorageMut,
|
|
|
|
|
};
|
2019-03-23 21:29:07 +08:00
|
|
|
|
use crate::base::vec_storage::VecStorage;
|
2021-07-20 07:00:40 +08:00
|
|
|
|
use crate::U1;
|
2021-07-15 06:21:22 +08:00
|
|
|
|
|
2016-12-05 05:44:42 +08:00
|
|
|
|
/*
|
|
|
|
|
*
|
|
|
|
|
* Allocator.
|
|
|
|
|
*
|
|
|
|
|
*/
|
2021-07-17 17:36:14 +08:00
|
|
|
|
/// A helper struct that controls how the storage for a matrix should be allocated.
|
|
|
|
|
///
|
|
|
|
|
/// This struct is useless on its own. Instead, it's used in trait
|
2018-12-06 05:46:17 +08:00
|
|
|
|
/// An allocator based on `GenericArray` and `VecStorage` for statically-sized and dynamically-sized
|
2016-12-05 05:44:42 +08:00
|
|
|
|
/// matrices respectively.
|
|
|
|
|
pub struct DefaultAllocator;
|
|
|
|
|
|
|
|
|
|
// Static - Static
impl<T, const R: usize, const C: usize> InnerAllocator<T, Const<R>, Const<C>> for DefaultAllocator {
    // Both dimensions are compile-time constants, so the buffer is a
    // stack-allocated array.
    type Buffer = ArrayStorage<T, R, C>;

    /// Fills a freshly-allocated uninitialized stack buffer with the elements
    /// yielded by `iter`.
    ///
    /// # Panics
    /// Panics if `iter` yields fewer than `R * C` elements. Extra elements are
    /// ignored, since the `zip` below stops once every slot has been written.
    #[inline]
    fn allocate_from_iterator<I: IntoIterator<Item = T>>(
        nrows: Const<R>,
        ncols: Const<C>,
        iter: I,
    ) -> Self::Buffer {
        let mut res = Self::allocate_uninitialized(nrows, ncols);
        let mut count = 0;

        // Move each yielded element into the uninitialized buffer, counting
        // how many slots were actually written.
        for (res, e) in res.as_mut_slice().iter_mut().zip(iter.into_iter()) {
            *res = MaybeUninit::new(e);
            count += 1;
        }

        assert!(
            count == nrows.value() * ncols.value(),
            "Matrix init. from iterator: iterator not long enough."
        );

        // Safety: we have initialized all entries.
        unsafe { <Self as Allocator<T, Const<R>, Const<C>>>::assume_init(res) }
    }
}
|
|
|
|
|
|
2021-07-15 06:21:22 +08:00
|
|
|
|
impl<T, const R: usize, const C: usize> Allocator<T, Const<R>, Const<C>> for DefaultAllocator {
    /// Allocates an uninitialized stack buffer. The dimension arguments are
    /// ignored because `R` and `C` are already known at compile-time.
    #[inline]
    fn allocate_uninitialized(
        _: Const<R>,
        _: Const<C>,
    ) -> InnerOwned<MaybeUninit<T>, Const<R>, Const<C>> {
        // SAFETY: An uninitialized `[MaybeUninit<_>; _]` is valid.
        let array = unsafe { MaybeUninit::uninit().assume_init() };
        ArrayStorage(array)
    }

    /// Reinterprets a buffer of `MaybeUninit<T>` as a buffer of `T`.
    ///
    /// # Safety
    /// The caller must guarantee that every element of `uninit` has been
    /// initialized.
    #[inline]
    unsafe fn assume_init(
        uninit: <Self as InnerAllocator<MaybeUninit<T>, Const<R>, Const<C>>>::Buffer,
    ) -> InnerOwned<T, Const<R>, Const<C>> {
        // Safety:
        // * The caller guarantees that all elements of the array are initialized
        // * `MaybeUninit<T>` and T are guaranteed to have the same layout
        // * `MaybeUninit` does not drop, so there are no double-frees
        // And thus the conversion is safe
        ArrayStorage((&uninit as *const _ as *const [_; C]).read())
    }

    /// Specifies that a given buffer's entries should be manually dropped.
    #[inline]
    fn manually_drop(
        buf: <Self as InnerAllocator<T, Const<R>, Const<C>>>::Buffer,
    ) -> <Self as InnerAllocator<ManuallyDrop<T>, Const<R>, Const<C>>>::Buffer {
        // SAFETY:
        // * `ManuallyDrop<T>` and T are guaranteed to have the same layout
        // * `ManuallyDrop` does not drop, so there are no double-frees
        // And thus the conversion is safe
        unsafe { ArrayStorage((&ManuallyDrop::new(buf) as *const _ as *const [_; C]).read()) }
    }
}
|
|
|
|
|
|
2016-12-05 05:44:42 +08:00
|
|
|
|
// Dynamic - Static
// Dynamic - Dynamic
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, C: Dim> InnerAllocator<T, Dynamic, C> for DefaultAllocator {
    // At least one dimension is only known at run-time, so the buffer is
    // heap-allocated.
    type Buffer = VecStorage<T, Dynamic, C>;

    /// Builds a heap-backed buffer from the elements yielded by `iter`.
    ///
    /// # Panics
    /// Panics if `iter` does not yield exactly `nrows * ncols` elements.
    #[inline]
    fn allocate_from_iterator<I: IntoIterator<Item = T>>(
        nrows: Dynamic,
        ncols: C,
        iter: I,
    ) -> Self::Buffer {
        // Gather every element up front, then validate the count.
        let data: Vec<T> = iter.into_iter().collect();
        let expected = nrows.value() * ncols.value();
        assert!(data.len() == expected,
            "Allocation from iterator error: the iterator did not yield the correct number of elements.");

        VecStorage::new(nrows, ncols, data)
    }
}
|
|
|
|
|
|
|
|
|
|
impl<T, C: Dim> Allocator<T, Dynamic, C> for DefaultAllocator {
    /// Allocates a heap buffer of `nrows * ncols` uninitialized entries.
    #[inline]
    fn allocate_uninitialized(nrows: Dynamic, ncols: C) -> InnerOwned<MaybeUninit<T>, Dynamic, C> {
        let mut data = Vec::new();
        let length = nrows.value() * ncols.value();
        // Reserve exactly once, then fill with uninitialized slots.
        data.reserve_exact(length);
        data.resize_with(length, MaybeUninit::uninit);

        VecStorage::new(nrows, ncols, data)
    }

    /// Reinterprets a buffer of `MaybeUninit<T>` as a buffer of `T`.
    ///
    /// # Safety
    /// The caller must guarantee that every element of `uninit` has been
    /// initialized.
    #[inline]
    unsafe fn assume_init(
        uninit: InnerOwned<MaybeUninit<T>, Dynamic, C>,
    ) -> InnerOwned<T, Dynamic, C> {
        // Avoids a double-drop.
        let (nrows, ncols) = uninit.shape();
        let vec: Vec<_> = uninit.into();
        let mut md = ManuallyDrop::new(vec);

        // Safety:
        // - MaybeUninit<T> has the same alignment and layout as T.
        // - The length and capacity come from a valid vector.
        let new_data = Vec::from_raw_parts(md.as_mut_ptr() as *mut _, md.len(), md.capacity());

        VecStorage::new(nrows, ncols, new_data)
    }

    /// Reinterprets the buffer's elements as `ManuallyDrop<T>`, so they will
    /// no longer be dropped automatically.
    #[inline]
    fn manually_drop(
        buf: <Self as InnerAllocator<T, Dynamic, C>>::Buffer,
    ) -> <Self as InnerAllocator<ManuallyDrop<T>, Dynamic, C>>::Buffer {
        // Avoids a double-drop.
        let (nrows, ncols) = buf.shape();
        let vec: Vec<_> = buf.into();
        let mut md = ManuallyDrop::new(vec);

        // Safety:
        // - ManuallyDrop<T> has the same alignment and layout as T.
        // - The length and capacity come from a valid vector.
        let new_data =
            unsafe { Vec::from_raw_parts(md.as_mut_ptr() as *mut _, md.len(), md.capacity()) };

        VecStorage::new(nrows, ncols, new_data)
    }
}
|
|
|
|
|
|
|
|
|
|
// Static - Dynamic
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, R: DimName> InnerAllocator<T, R, Dynamic> for DefaultAllocator {
    // The column count is only known at run-time, so the buffer is
    // heap-allocated.
    type Buffer = VecStorage<T, R, Dynamic>;

    /// Builds a heap-backed buffer from the elements yielded by `iter`.
    ///
    /// # Panics
    /// Panics if `iter` does not yield exactly `nrows * ncols` elements.
    #[inline]
    fn allocate_from_iterator<I: IntoIterator<Item = T>>(
        nrows: R,
        ncols: Dynamic,
        iter: I,
    ) -> InnerOwned<T, R, Dynamic> {
        // Gather every element up front, then validate the count.
        let data: Vec<T> = iter.into_iter().collect();
        let expected = nrows.value() * ncols.value();
        assert!(data.len() == expected,
            "Allocation from iterator error: the iterator did not yield the correct number of elements.");

        VecStorage::new(nrows, ncols, data)
    }
}
|
|
|
|
|
|
2021-07-14 17:25:16 +08:00
|
|
|
|
impl<T, R: DimName> Allocator<T, R, Dynamic> for DefaultAllocator {
    /// Allocates a heap buffer of `nrows * ncols` uninitialized entries.
    #[inline]
    fn allocate_uninitialized(nrows: R, ncols: Dynamic) -> InnerOwned<MaybeUninit<T>, R, Dynamic> {
        let mut data = Vec::new();
        let length = nrows.value() * ncols.value();
        // Reserve exactly once, then fill with uninitialized slots.
        data.reserve_exact(length);
        data.resize_with(length, MaybeUninit::uninit);

        VecStorage::new(nrows, ncols, data)
    }

    /// Reinterprets a buffer of `MaybeUninit<T>` as a buffer of `T`.
    ///
    /// # Safety
    /// The caller must guarantee that every element of `uninit` has been
    /// initialized.
    #[inline]
    unsafe fn assume_init(
        uninit: InnerOwned<MaybeUninit<T>, R, Dynamic>,
    ) -> InnerOwned<T, R, Dynamic> {
        // Avoids a double-drop.
        let (nrows, ncols) = uninit.shape();
        let vec: Vec<_> = uninit.into();
        let mut md = ManuallyDrop::new(vec);

        // Safety:
        // - MaybeUninit<T> has the same alignment and layout as T.
        // - The length and capacity come from a valid vector.
        let new_data = Vec::from_raw_parts(md.as_mut_ptr() as *mut _, md.len(), md.capacity());

        VecStorage::new(nrows, ncols, new_data)
    }

    /// Reinterprets the buffer's elements as `ManuallyDrop<T>`, so they will
    /// no longer be dropped automatically.
    #[inline]
    fn manually_drop(
        buf: <Self as InnerAllocator<T, R, Dynamic>>::Buffer,
    ) -> <Self as InnerAllocator<ManuallyDrop<T>, R, Dynamic>>::Buffer {
        // Avoids a double-drop.
        let (nrows, ncols) = buf.shape();
        let vec: Vec<_> = buf.into();
        let mut md = ManuallyDrop::new(vec);

        // Safety:
        // - ManuallyDrop<T> has the same alignment and layout as T.
        // - The length and capacity come from a valid vector.
        let new_data =
            unsafe { Vec::from_raw_parts(md.as_mut_ptr() as *mut _, md.len(), md.capacity()) };

        VecStorage::new(nrows, ncols, new_data)
    }
}
|
2017-08-03 01:37:44 +08:00
|
|
|
|
|
2021-07-20 07:00:40 +08:00
|
|
|
|
/// The owned storage type for a matrix.
///
/// This is a `#[repr(transparent)]` wrapper around [`InnerOwned`], i.e. an
/// `ArrayStorage` when both dimensions are known at compile-time and a
/// `VecStorage` otherwise. The transparent representation is relied upon by
/// the pointer casts in the `Storage` implementation below.
#[repr(transparent)]
pub struct Owned<T, R: Dim, C: Dim>(pub InnerOwned<T, R, C>)
where
    DefaultAllocator: Allocator<T, R, C>;
|
|
|
|
|
|
|
|
|
|
// `Owned` is `Copy` whenever its inner buffer is, i.e. for stack-allocated
// storage of `Copy` elements with both dimensions known at compile-time.
impl<T: Copy, R: DimName, C: DimName> Copy for Owned<T, R, C>
where
    DefaultAllocator: Allocator<T, R, C>,
    InnerOwned<T, R, C>: Copy,
{
}
|
|
|
|
|
|
|
|
|
|
impl<T: Clone, R: Dim, C: Dim> Clone for Owned<T, R, C>
where
    DefaultAllocator: Allocator<T, R, C>,
{
    /// Clones the underlying storage, dispatching at run-time on whether it
    /// is array-backed or `Vec`-backed.
    fn clone(&self) -> Self {
        if Self::is_array() {
            // We first clone the data.
            let slice = unsafe { self.as_slice_unchecked() };
            let vec = ManuallyDrop::new(slice.to_owned());

            // We then transmute it back into an array and then an Owned.
            // NOTE(review): `transmute_copy` reads `size_of::<Self>()` bytes starting
            // at the vector's first element; this relies on `Self` being
            // `#[repr(transparent)]` over the element array — confirm this stays
            // sound if the layout ever changes.
            unsafe { mem::transmute_copy(&*vec.as_ptr()) }

            // TODO: check that the auxiliary copy is elided.
        } else {
            // We first clone the data. `ManuallyDrop` prevents a double-drop of the
            // clone once its bytes have been moved out by `transmute_copy`.
            let clone = ManuallyDrop::new(self.as_vec_storage().clone());

            // We then transmute it back into an Owned.
            unsafe { mem::transmute_copy(&clone) }

            // TODO: check that the auxiliary copy is elided.
        }
    }
}
|
|
|
|
|
|
|
|
|
|
impl<T: fmt::Debug, R: Dim, C: Dim> fmt::Debug for Owned<T, R, C>
|
|
|
|
|
where
|
|
|
|
|
DefaultAllocator: Allocator<T, R, C>,
|
|
|
|
|
{
|
|
|
|
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
|
|
|
if Self::is_array() {
|
|
|
|
|
let slice = unsafe { self.as_slice_unchecked() };
|
|
|
|
|
slice.fmt(f)
|
|
|
|
|
} else {
|
|
|
|
|
self.as_vec_storage().fmt(f)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
impl<T, R: Dim, C: Dim> Owned<T, R, C>
|
|
|
|
|
where
|
|
|
|
|
DefaultAllocator: Allocator<T, R, C>,
|
|
|
|
|
{
|
|
|
|
|
/// Returns whether `Self` stores an [`ArrayStorage`].
|
|
|
|
|
fn is_array() -> bool {
|
|
|
|
|
R::is_static() && C::is_static()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Returns whether `Self` stores a [`VecStorage`].
|
|
|
|
|
fn is_vec() -> bool {
|
|
|
|
|
!Self::is_array()
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Returns the underlying [`VecStorage`]. Does not do any sort of static
|
|
|
|
|
/// type checking.
|
|
|
|
|
///
|
|
|
|
|
/// # Panics
|
|
|
|
|
/// This method will panic if `Self` does not contain a [`VecStorage`].
|
|
|
|
|
fn as_vec_storage(&self) -> &VecStorage<T, R, C> {
|
|
|
|
|
assert!(Self::is_vec());
|
|
|
|
|
|
|
|
|
|
// Safety: `self` is transparent and must contain a `VecStorage`.
|
|
|
|
|
unsafe { &*(&self as *const _ as *const _) }
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
unsafe impl<T, R: Dim, C: Dim> Storage<T, R, C> for Owned<T, R, C>
|
|
|
|
|
where
|
|
|
|
|
DefaultAllocator: Allocator<T, R, C>,
|
|
|
|
|
{
|
|
|
|
|
type RStride = U1;
|
|
|
|
|
|
|
|
|
|
type CStride = R;
|
|
|
|
|
|
|
|
|
|
fn ptr(&self) -> *const T {
|
|
|
|
|
if Self::is_array() {
|
|
|
|
|
&self as *const _ as *const T
|
|
|
|
|
} else {
|
|
|
|
|
self.as_vec_storage().as_vec().as_ptr()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn shape(&self) -> (R, C) {
|
|
|
|
|
if Self::is_array() {
|
|
|
|
|
(R::default(), C::default())
|
|
|
|
|
} else {
|
|
|
|
|
let vec = self.as_vec_storage();
|
|
|
|
|
(vec.nrows, vec.ncols)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn strides(&self) -> (Self::RStride, Self::CStride) {
|
|
|
|
|
if Self::is_array() {
|
|
|
|
|
(U1::name(), R::default())
|
|
|
|
|
} else {
|
|
|
|
|
let vec = self.as_vec_storage();
|
|
|
|
|
(U1::name(), vec.nrows)
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn is_contiguous(&self) -> bool {
|
|
|
|
|
true
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
unsafe fn as_slice_unchecked(&self) -> &[T] {
|
|
|
|
|
if Self::is_array() {
|
|
|
|
|
std::slice::from_raw_parts(
|
|
|
|
|
self.ptr(),
|
|
|
|
|
R::try_to_usize().unwrap() * C::try_to_usize().unwrap(),
|
|
|
|
|
)
|
|
|
|
|
} else {
|
|
|
|
|
self.as_vec_storage().as_vec().as_ref()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn into_owned(self) -> Owned<T, R, C> {
|
|
|
|
|
self
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn clone_owned(&self) -> Owned<T, R, C>
|
|
|
|
|
where
|
|
|
|
|
T: Clone,
|
|
|
|
|
{
|
|
|
|
|
self.clone()
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
unsafe impl<T, R: Dim, C: Dim> StorageMut<T, R, C> for Owned<T, R, C>
where
    DefaultAllocator: Allocator<T, R, C>,
{
    // NOTE(review): both methods below are unimplemented stubs and will panic
    // if called — this impl only satisfies the trait bound for now.
    fn ptr_mut(&mut self) -> *mut T {
        todo!()
    }

    unsafe fn as_mut_slice_unchecked(&mut self) -> &mut [T] {
        todo!()
    }
}
|
|
|
|
|
|
|
|
|
|
// Owned storage is always dense (see `Storage::is_contiguous` above), so the
// contiguity marker traits hold unconditionally.
unsafe impl<T, R: Dim, C: Dim> ContiguousStorage<T, R, C> for Owned<T, R, C> where
    DefaultAllocator: Allocator<T, R, C>
{
}

unsafe impl<T, R: Dim, C: Dim> ContiguousStorageMut<T, R, C> for Owned<T, R, C> where
    DefaultAllocator: Allocator<T, R, C>
{
}
|
|
|
|
|
|
2017-08-03 01:37:44 +08:00
|
|
|
|
/*
|
|
|
|
|
*
|
|
|
|
|
* Reallocator.
|
|
|
|
|
*
|
|
|
|
|
*/
|
|
|
|
|
// Anything -> Static × Static
impl<T, RFrom: Dim, CFrom: Dim, const RTO: usize, const CTO: usize>
    Reallocator<T, RFrom, CFrom, Const<RTO>, Const<CTO>> for DefaultAllocator
where
    Self: Allocator<T, RFrom, CFrom>,
{
    /// Copies as many elements as fit from `buf` into a new stack-allocated buffer.
    ///
    /// # Safety
    /// When growing, the new buffer's trailing entries are left uninitialized;
    /// the caller must initialize them before use.
    ///
    /// NOTE(review): the source elements are bitwise-copied and `buf` is then
    /// dropped normally — for `T: Drop` this looks like a potential double-drop;
    /// confirm callers uphold the required contract.
    #[inline]
    unsafe fn reallocate_copy(
        rto: Const<RTO>,
        cto: Const<CTO>,
        buf: InnerOwned<T, RFrom, CFrom>,
    ) -> ArrayStorage<T, RTO, CTO> {
        let mut res =
            <Self as Allocator<_, Const<RTO>, Const<CTO>>>::allocate_uninitialized(rto, cto);

        let (rfrom, cfrom) = buf.shape();

        let len_from = rfrom.value() * cfrom.value();
        let len_to = rto.value() * cto.value();
        // Copy the common prefix of the old buffer into the new one.
        ptr::copy_nonoverlapping(
            buf.ptr(),
            res.ptr_mut() as *mut T,
            cmp::min(len_from, len_to),
        );

        // Safety: TODO
        <Self as Allocator<_, Const<RTO>, Const<CTO>>>::assume_init(res)
    }
}
|
|
|
|
|
|
|
|
|
|
// Static × Static -> Dynamic × Any
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, CTo, const RFROM: usize, const CFROM: usize>
    Reallocator<T, Const<RFROM>, Const<CFROM>, Dynamic, CTo> for DefaultAllocator
where
    CTo: Dim,
{
    /// Copies as many elements as fit from the stack buffer `buf` into a new
    /// heap-allocated buffer.
    ///
    /// # Safety
    /// When growing, the new buffer's trailing entries are left uninitialized;
    /// the caller must initialize them before use.
    #[inline]
    unsafe fn reallocate_copy(
        rto: Dynamic,
        cto: CTo,
        buf: ArrayStorage<T, RFROM, CFROM>,
    ) -> VecStorage<T, Dynamic, CTo> {
        let mut res = <Self as Allocator<T, Dynamic, CTo>>::allocate_uninitialized(rto, cto);

        let (rfrom, cfrom) = buf.shape();

        let len_from = rfrom.value() * cfrom.value();
        let len_to = rto.value() * cto.value();
        // Copy the common prefix of the old buffer into the new one.
        ptr::copy_nonoverlapping(
            buf.ptr(),
            res.ptr_mut() as *mut T,
            cmp::min(len_from, len_to),
        );

        <Self as Allocator<T, Dynamic, CTo>>::assume_init(res)
    }
}
|
|
|
|
|
|
|
|
|
|
// Static × Static -> Static × Dynamic
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, RTo, const RFROM: usize, const CFROM: usize>
    Reallocator<T, Const<RFROM>, Const<CFROM>, RTo, Dynamic> for DefaultAllocator
where
    RTo: DimName,
{
    /// Copies as many elements as fit from the stack buffer `buf` into a new
    /// heap-allocated buffer.
    ///
    /// # Safety
    /// When growing, the new buffer's trailing entries are left uninitialized;
    /// the caller must initialize them before use.
    #[inline]
    unsafe fn reallocate_copy(
        rto: RTo,
        cto: Dynamic,
        buf: ArrayStorage<T, RFROM, CFROM>,
    ) -> VecStorage<T, RTo, Dynamic> {
        let mut res = <Self as Allocator<T, RTo, Dynamic>>::allocate_uninitialized(rto, cto);

        let (rfrom, cfrom) = buf.shape();

        let len_from = rfrom.value() * cfrom.value();
        let len_to = rto.value() * cto.value();
        // Copy the common prefix of the old buffer into the new one.
        ptr::copy_nonoverlapping(
            buf.ptr(),
            res.ptr_mut() as *mut T,
            cmp::min(len_from, len_to),
        );

        <Self as Allocator<T, RTo, Dynamic>>::assume_init(res)
    }
}
|
|
|
|
|
|
|
|
|
|
// All conversion from a dynamic buffer to a dynamic buffer.
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, CFrom: Dim, CTo: Dim> Reallocator<T, Dynamic, CFrom, Dynamic, CTo> for DefaultAllocator {
    /// Resizes the heap buffer in place to hold `rto * cto` elements and
    /// rewraps it with the new dimensions.
    #[inline]
    unsafe fn reallocate_copy(
        rto: Dynamic,
        cto: CTo,
        buf: VecStorage<T, Dynamic, CFrom>,
    ) -> VecStorage<T, Dynamic, CTo> {
        let new_len = rto.value() * cto.value();
        VecStorage::new(rto, cto, buf.resize(new_len))
    }
}
|
|
|
|
|
|
2018-05-19 23:15:15 +08:00
|
|
|
|
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, CFrom: Dim, RTo: DimName> Reallocator<T, Dynamic, CFrom, RTo, Dynamic>
    for DefaultAllocator
{
    /// Resizes the heap buffer in place to hold `rto * cto` elements and
    /// rewraps it with the new dimensions.
    #[inline]
    unsafe fn reallocate_copy(
        rto: RTo,
        cto: Dynamic,
        buf: VecStorage<T, Dynamic, CFrom>,
    ) -> VecStorage<T, RTo, Dynamic> {
        let new_len = rto.value() * cto.value();
        VecStorage::new(rto, cto, buf.resize(new_len))
    }
}
|
|
|
|
|
|
2018-05-19 23:15:15 +08:00
|
|
|
|
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, RFrom: DimName, CTo: Dim> Reallocator<T, RFrom, Dynamic, Dynamic, CTo>
    for DefaultAllocator
{
    /// Resizes the heap buffer in place to hold `rto * cto` elements and
    /// rewraps it with the new dimensions.
    #[inline]
    unsafe fn reallocate_copy(
        rto: Dynamic,
        cto: CTo,
        buf: VecStorage<T, RFrom, Dynamic>,
    ) -> VecStorage<T, Dynamic, CTo> {
        let new_len = rto.value() * cto.value();
        VecStorage::new(rto, cto, buf.resize(new_len))
    }
}
|
|
|
|
|
|
2018-05-19 23:15:15 +08:00
|
|
|
|
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, RFrom: DimName, RTo: DimName> Reallocator<T, RFrom, Dynamic, RTo, Dynamic>
    for DefaultAllocator
{
    /// Resizes the heap buffer in place to hold `rto * cto` elements and
    /// rewraps it with the new dimensions.
    #[inline]
    unsafe fn reallocate_copy(
        rto: RTo,
        cto: Dynamic,
        buf: VecStorage<T, RFrom, Dynamic>,
    ) -> VecStorage<T, RTo, Dynamic> {
        let new_len = rto.value() * cto.value();
        VecStorage::new(rto, cto, buf.resize(new_len))
    }
}
|