nalgebra/src/base/default_allocator.rs

//! The default matrix data storage allocator.
//!
//! This will use stack-allocated buffers for matrices with dimensions known at compile-time, and
//! heap-allocated buffers for matrices with at least one dimension unknown at compile-time.
use std::cmp;
use std::mem::ManuallyDrop;
use std::mem::MaybeUninit;
use std::ptr;

#[cfg(all(feature = "alloc", not(feature = "std")))]
use alloc::vec::Vec;

use super::Const;
use crate::base::allocator::{Allocator, InnerAllocator, Reallocator};
use crate::base::array_storage::ArrayStorage;
#[cfg(any(feature = "alloc", feature = "std"))]
use crate::base::dimension::Dynamic;
use crate::base::dimension::{Dim, DimName};
use crate::base::storage::{ContiguousStorageMut, Storage, StorageMut};
#[cfg(any(feature = "std", feature = "alloc"))]
use crate::base::vec_storage::VecStorage;
use crate::storage::Owned;

type DefaultBuffer<T, R, C> = <DefaultAllocator as InnerAllocator<T, R, C>>::Buffer;
type DefaultUninitBuffer<T, R, C> =
    <DefaultAllocator as InnerAllocator<MaybeUninit<T>, R, C>>::Buffer;
/*
 *
 * Allocator.
 *
 */
/// A helper struct that controls how the storage for a matrix should be allocated.
///
/// This struct is useless on its own. Instead, it's used through its `Allocator` and
/// `Reallocator` trait implementations: `ArrayStorage` backs statically-sized matrices and
/// `VecStorage` backs dynamically-sized ones.
pub struct DefaultAllocator;
// Static - Static
impl<T, const R: usize, const C: usize> InnerAllocator<T, Const<R>, Const<C>> for DefaultAllocator {
    type Buffer = ArrayStorage<T, R, C>;

    #[inline]
    fn allocate_from_iterator<I: IntoIterator<Item = T>>(
        nrows: Const<R>,
        ncols: Const<C>,
        iter: I,
    ) -> Self::Buffer {
        let mut res = Self::allocate_uninitialized(nrows, ncols);
        let mut count = 0;

        for (res, e) in res.as_mut_slice().iter_mut().zip(iter.into_iter()) {
            *res = MaybeUninit::new(e);
            count += 1;
        }

        assert!(
            count == nrows.value() * ncols.value(),
            "Matrix init. from iterator: iterator not long enough."
        );

        // Safety: we have initialized all entries.
        unsafe { <Self as Allocator<T, Const<R>, Const<C>>>::assume_init(res) }
    }
}
impl<T, const R: usize, const C: usize> Allocator<T, Const<R>, Const<C>> for DefaultAllocator {
    #[inline]
    fn allocate_uninitialized(
        _: Const<R>,
        _: Const<C>,
    ) -> Owned<MaybeUninit<T>, Const<R>, Const<C>> {
        // SAFETY: an uninitialized `[[MaybeUninit<T>; R]; C]` is a valid value, so materializing
        // it through an outer `MaybeUninit` is sound. (A `[MaybeUninit::uninit(); R]` repeat
        // expression is avoided here because it would require `T: Copy`.)
        ArrayStorage(unsafe { MaybeUninit::<[[MaybeUninit<T>; R]; C]>::uninit().assume_init() })
    }

    #[inline]
    unsafe fn assume_init(
        uninit: <Self as InnerAllocator<MaybeUninit<T>, Const<R>, Const<C>>>::Buffer,
    ) -> Owned<T, Const<R>, Const<C>> {
        // SAFETY:
        // * The caller guarantees that all elements of the array are initialized.
        // * `MaybeUninit<T>` and `T` are guaranteed to have the same layout.
        // * `MaybeUninit` does not drop, so there are no double-frees.
        // * `ArrayStorage` is transparent.
        // And thus the conversion is safe.
        ArrayStorage((&uninit as *const _ as *const [_; C]).read())
    }
}
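// The pair of methods above is meant to be used together: `allocate_uninitialized` hands out a
// buffer of `MaybeUninit<T>`, the caller writes every entry, and only then is the buffer
// converted with `assume_init`. An illustrative sketch, assuming the signatures above:
//
//     let mut buf =
//         <DefaultAllocator as Allocator<u32, Const<2>, Const<2>>>::allocate_uninitialized(
//             Const::<2>,
//             Const::<2>,
//         );
//     for e in buf.as_mut_slice().iter_mut() {
//         *e = MaybeUninit::new(0);
//     }
//     // Safety: every entry was initialized by the loop above.
//     let buf =
//         unsafe { <DefaultAllocator as Allocator<u32, Const<2>, Const<2>>>::assume_init(buf) };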
// Dynamic - Static
// Dynamic - Dynamic
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, C: Dim> InnerAllocator<T, Dynamic, C> for DefaultAllocator {
    type Buffer = VecStorage<T, Dynamic, C>;

    #[inline]
    fn allocate_from_iterator<I: IntoIterator<Item = T>>(
        nrows: Dynamic,
        ncols: C,
        iter: I,
    ) -> Self::Buffer {
        let it = iter.into_iter();
        let res: Vec<T> = it.collect();
        assert!(
            res.len() == nrows.value() * ncols.value(),
            "Allocation from iterator error: the iterator did not yield the correct number of elements."
        );

        VecStorage::new(nrows, ncols, res)
    }
}
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, C: Dim> Allocator<T, Dynamic, C> for DefaultAllocator {
    #[inline]
    fn allocate_uninitialized(nrows: Dynamic, ncols: C) -> Owned<MaybeUninit<T>, Dynamic, C> {
        let mut data = Vec::new();
        let length = nrows.value() * ncols.value();
        data.reserve_exact(length);
        data.resize_with(length, MaybeUninit::uninit);

        VecStorage::new(nrows, ncols, data)
    }

    #[inline]
    unsafe fn assume_init(uninit: Owned<MaybeUninit<T>, Dynamic, C>) -> Owned<T, Dynamic, C> {
        let mut data = ManuallyDrop::new(uninit.data);

        // Safety: MaybeUninit<T> has the same alignment and layout as T.
        let new_data = unsafe {
            Vec::from_raw_parts(data.as_mut_ptr() as *mut T, data.len(), data.capacity())
        };

        VecStorage::new(uninit.nrows, uninit.ncols, new_data)
    }
}
// Static - Dynamic
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, R: DimName> InnerAllocator<T, R, Dynamic> for DefaultAllocator {
    type Buffer = VecStorage<T, R, Dynamic>;

    #[inline]
    fn allocate_from_iterator<I: IntoIterator<Item = T>>(
        nrows: R,
        ncols: Dynamic,
        iter: I,
    ) -> Owned<T, R, Dynamic> {
        let it = iter.into_iter();
        let res: Vec<T> = it.collect();
        assert!(
            res.len() == nrows.value() * ncols.value(),
            "Allocation from iterator error: the iterator did not yield the correct number of elements."
        );

        VecStorage::new(nrows, ncols, res)
    }
}
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, R: DimName> Allocator<T, R, Dynamic> for DefaultAllocator {
    #[inline]
    fn allocate_uninitialized(nrows: R, ncols: Dynamic) -> Owned<MaybeUninit<T>, R, Dynamic> {
        let mut data = Vec::new();
        let length = nrows.value() * ncols.value();
        data.reserve_exact(length);
        data.resize_with(length, MaybeUninit::uninit);

        VecStorage::new(nrows, ncols, data)
    }

    #[inline]
    unsafe fn assume_init(uninit: Owned<MaybeUninit<T>, R, Dynamic>) -> Owned<T, R, Dynamic> {
        let mut data = ManuallyDrop::new(uninit.data);

        // Safety: MaybeUninit<T> has the same alignment and layout as T.
        let new_data = unsafe {
            Vec::from_raw_parts(data.as_mut_ptr() as *mut T, data.len(), data.capacity())
        };

        VecStorage::new(uninit.nrows, uninit.ncols, new_data)
    }
}
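// Note on the two dynamic `assume_init` implementations above: wrapping the original vector in
// `ManuallyDrop` before calling `Vec::from_raw_parts` ensures the heap allocation is owned by
// exactly one `Vec` at a time, so it is neither freed twice nor leaked, while the pointer cast
// is valid because `MaybeUninit<T>` has the same size and alignment as `T`.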
/*
 *
 * Reallocator.
 *
 */
// Anything -> Static × Static
impl<T, RFrom: Dim, CFrom: Dim, const RTO: usize, const CTO: usize>
    Reallocator<T, RFrom, CFrom, Const<RTO>, Const<CTO>> for DefaultAllocator
where
    Self: Allocator<T, RFrom, CFrom>,
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: Const<RTO>,
        cto: Const<CTO>,
        buf: Owned<T, RFrom, CFrom>,
    ) -> ArrayStorage<T, RTO, CTO> {
        let mut res =
            <Self as Allocator<_, Const<RTO>, Const<CTO>>>::allocate_uninitialized(rto, cto);

        let (rfrom, cfrom) = buf.shape();
        let len_from = rfrom.value() * cfrom.value();
        let len_to = rto.value() * cto.value();

        ptr::copy_nonoverlapping(
            buf.ptr(),
            res.ptr_mut() as *mut T,
            cmp::min(len_from, len_to),
        );

        // Safety: TODO
        <Self as Allocator<_, Const<RTO>, Const<CTO>>>::assume_init(res)
    }
}
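// Note on the pointer-copy based `reallocate_copy` implementations (the one above and the two
// that follow): only `min(len_from, len_to)` elements are copied from the old buffer, so when
// the target buffer is larger than the source, its trailing entries are not written by the
// copy itself.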
// Static × Static -> Dynamic × Any
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, CTo, const RFROM: usize, const CFROM: usize>
    Reallocator<T, Const<RFROM>, Const<CFROM>, Dynamic, CTo> for DefaultAllocator
where
    CTo: Dim,
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: Dynamic,
        cto: CTo,
        buf: ArrayStorage<T, RFROM, CFROM>,
    ) -> VecStorage<T, Dynamic, CTo> {
        let mut res = <Self as Allocator<T, Dynamic, CTo>>::allocate_uninitialized(rto, cto);

        let (rfrom, cfrom) = buf.shape();
        let len_from = rfrom.value() * cfrom.value();
        let len_to = rto.value() * cto.value();

        ptr::copy_nonoverlapping(
            buf.ptr(),
            res.ptr_mut() as *mut T,
            cmp::min(len_from, len_to),
        );

        <Self as Allocator<T, Dynamic, CTo>>::assume_init(res)
    }
}
// Static × Static -> Static × Dynamic
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, RTo, const RFROM: usize, const CFROM: usize>
    Reallocator<T, Const<RFROM>, Const<CFROM>, RTo, Dynamic> for DefaultAllocator
where
    RTo: DimName,
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: RTo,
        cto: Dynamic,
        buf: ArrayStorage<T, RFROM, CFROM>,
    ) -> VecStorage<T, RTo, Dynamic> {
        let mut res = <Self as Allocator<T, RTo, Dynamic>>::allocate_uninitialized(rto, cto);

        let (rfrom, cfrom) = buf.shape();
        let len_from = rfrom.value() * cfrom.value();
        let len_to = rto.value() * cto.value();

        ptr::copy_nonoverlapping(
            buf.ptr(),
            res.ptr_mut() as *mut T,
            cmp::min(len_from, len_to),
        );

        <Self as Allocator<T, RTo, Dynamic>>::assume_init(res)
    }
}
// All conversions from a dynamic buffer to a dynamic buffer.
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, CFrom: Dim, CTo: Dim> Reallocator<T, Dynamic, CFrom, Dynamic, CTo> for DefaultAllocator {
    #[inline]
    unsafe fn reallocate_copy(
        rto: Dynamic,
        cto: CTo,
        buf: VecStorage<T, Dynamic, CFrom>,
    ) -> VecStorage<T, Dynamic, CTo> {
        let new_buf = buf.resize(rto.value() * cto.value());
        VecStorage::new(rto, cto, new_buf)
    }
}
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, CFrom: Dim, RTo: DimName> Reallocator<T, Dynamic, CFrom, RTo, Dynamic>
    for DefaultAllocator
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: RTo,
        cto: Dynamic,
        buf: VecStorage<T, Dynamic, CFrom>,
    ) -> VecStorage<T, RTo, Dynamic> {
        let new_buf = buf.resize(rto.value() * cto.value());
        VecStorage::new(rto, cto, new_buf)
    }
}
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, RFrom: DimName, CTo: Dim> Reallocator<T, RFrom, Dynamic, Dynamic, CTo>
    for DefaultAllocator
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: Dynamic,
        cto: CTo,
        buf: VecStorage<T, RFrom, Dynamic>,
    ) -> VecStorage<T, Dynamic, CTo> {
        let new_buf = buf.resize(rto.value() * cto.value());
        VecStorage::new(rto, cto, new_buf)
    }
}
#[cfg(any(feature = "std", feature = "alloc"))]
impl<T, RFrom: DimName, RTo: DimName> Reallocator<T, RFrom, Dynamic, RTo, Dynamic>
    for DefaultAllocator
{
    #[inline]
    unsafe fn reallocate_copy(
        rto: RTo,
        cto: Dynamic,
        buf: VecStorage<T, RFrom, Dynamic>,
    ) -> VecStorage<T, RTo, Dynamic> {
        let new_buf = buf.resize(rto.value() * cto.value());
        VecStorage::new(rto, cto, new_buf)
    }
}
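
// Minimal sketches exercising the allocation API declared in this file. They assume the
// `InnerAllocator` signatures above and the crate-internal storage layout (e.g. the public
// `.0` field of `ArrayStorage`); they are illustrative rather than exhaustive.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn allocate_static_from_iterator() {
        // A 2×3 statically-sized buffer is filled in column-major order.
        let storage =
            <DefaultAllocator as InnerAllocator<i32, Const<2>, Const<3>>>::allocate_from_iterator(
                Const::<2>,
                Const::<3>,
                0..6,
            );
        assert_eq!(storage.0, [[0, 1], [2, 3], [4, 5]]);
    }

    #[cfg(any(feature = "std", feature = "alloc"))]
    #[test]
    fn allocate_dynamic_from_iterator() {
        // A 3×2 buffer with a dynamic row count and a compile-time column count.
        let storage =
            <DefaultAllocator as InnerAllocator<i32, Dynamic, Const<2>>>::allocate_from_iterator(
                Dynamic::new(3),
                Const::<2>,
                0..6,
            );
        let (nrows, ncols) = storage.shape();
        assert_eq!((nrows.value(), ncols.value()), (3, 2));
    }
}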