commit 04a97bb79e
Author: Saurabh
Date:   2022-03-21 16:57:32 -06:00

12 changed files with 543 additions and 122 deletions

View File

@@ -14,6 +14,7 @@ use crate::coo::CooMatrix;
 use crate::cs;
 use crate::csc::CscMatrix;
 use crate::csr::CsrMatrix;
+use crate::utils::{apply_permutation, compute_sort_permutation};
 
 /// Converts a dense matrix to [`CooMatrix`].
 pub fn convert_dense_coo<T, R, C, S>(dense: &Matrix<T, R, C, S>) -> CooMatrix<T>
@@ -376,29 +377,12 @@ fn sort_lane<T: Clone>(
     assert_eq!(values.len(), workspace.len());
 
     let permutation = workspace;
-    // Set permutation to identity
-    for (i, p) in permutation.iter_mut().enumerate() {
-        *p = i;
-    }
-
-    // Compute permutation needed to bring minor indices into sorted order
-    // Note: Using sort_unstable here avoids internal allocations, which is crucial since
-    // each lane might have a small number of elements
-    permutation.sort_unstable_by_key(|idx| minor_idx[*idx]);
+    compute_sort_permutation(permutation, minor_idx);
 
     apply_permutation(minor_idx_result, minor_idx, permutation);
     apply_permutation(values_result, values, permutation);
 }
 
-// TODO: Move this into `utils` or something?
-fn apply_permutation<T: Clone>(out_slice: &mut [T], in_slice: &[T], permutation: &[usize]) {
-    assert_eq!(out_slice.len(), in_slice.len());
-    assert_eq!(out_slice.len(), permutation.len());
-    for (out_element, old_pos) in out_slice.iter_mut().zip(permutation) {
-        *out_element = in_slice[*old_pos].clone();
-    }
-}
-
 /// Given *sorted* indices and corresponding scalar values, combines duplicates with the given
 /// associative combiner and calls the provided produce methods with combined indices and values.
 fn combine_duplicates<T: Clone>(

View File

@@ -6,7 +6,8 @@ use num_traits::One;
 use nalgebra::Scalar;
 
 use crate::pattern::SparsityPattern;
-use crate::{SparseEntry, SparseEntryMut};
+use crate::utils::{apply_permutation, compute_sort_permutation};
+use crate::{SparseEntry, SparseEntryMut, SparseFormatError, SparseFormatErrorKind};
 
 /// An abstract compressed matrix.
 ///
@@ -543,3 +544,151 @@ pub fn convert_counts_to_offsets(counts: &mut [usize]) {
         offset += count;
     }
 }
+
+/// Validates CS data, optionally sorting minor indices and values within each lane.
+pub(crate) fn validate_and_optionally_sort_cs_data<T>(
+    major_dim: usize,
+    minor_dim: usize,
+    major_offsets: &[usize],
+    minor_indices: &mut [usize],
+    values: Option<&mut [T]>,
+    sort: bool,
+) -> Result<(), SparseFormatError>
+where
+    T: Scalar,
+{
+    let mut values_option = values;
+
+    if let Some(values) = values_option.as_mut() {
+        if minor_indices.len() != values.len() {
+            return Err(SparseFormatError::from_kind_and_msg(
+                SparseFormatErrorKind::InvalidStructure,
+                "Number of values and minor indices must be the same.",
+            ));
+        }
+    } else if sort {
+        unreachable!("Internal error: Sorting currently not supported if no values are present.");
+    }
+
+    if major_offsets.len() == 0 {
+        return Err(SparseFormatError::from_kind_and_msg(
+            SparseFormatErrorKind::InvalidStructure,
+            "Number of offsets should be greater than 0.",
+        ));
+    }
+
+    if major_offsets.len() != major_dim + 1 {
+        return Err(SparseFormatError::from_kind_and_msg(
+            SparseFormatErrorKind::InvalidStructure,
+            "Length of offset array is not equal to (major_dim + 1).",
+        ));
+    }
+
+    // Check that the first and last offsets conform to the specification
+    {
+        let first_offset_ok = *major_offsets.first().unwrap() == 0;
+        let last_offset_ok = *major_offsets.last().unwrap() == minor_indices.len();
+        if !first_offset_ok || !last_offset_ok {
+            return Err(SparseFormatError::from_kind_and_msg(
+                SparseFormatErrorKind::InvalidStructure,
+                "First or last offset is incompatible with format.",
+            ));
+        }
+    }
+
+    // Set up required buffers up front
+    let mut minor_idx_buffer: Vec<usize> = Vec::new();
+    let mut values_buffer: Vec<T> = Vec::new();
+    let mut minor_index_permutation: Vec<usize> = Vec::new();
+
+    // Test that each lane has strictly monotonically increasing minor indices, i.e.
+    // minor indices within a lane are sorted, unique. Sort minor indices within a lane if needed.
+    // In addition, each minor index must be in bounds with respect to the minor dimension.
+    {
+        for lane_idx in 0..major_dim {
+            let range_start = major_offsets[lane_idx];
+            let range_end = major_offsets[lane_idx + 1];
+
+            // Test that major offsets are monotonically increasing
+            if range_start > range_end {
+                return Err(SparseFormatError::from_kind_and_msg(
+                    SparseFormatErrorKind::InvalidStructure,
+                    "Offsets are not monotonically increasing.",
+                ));
+            }
+
+            let minor_idx_in_lane = minor_indices.get(range_start..range_end).ok_or(
+                SparseFormatError::from_kind_and_msg(
+                    SparseFormatErrorKind::IndexOutOfBounds,
+                    "A major offset is out of bounds.",
+                ),
+            )?;
+
+            // We test for in-bounds, uniqueness and monotonicity at the same time
+            // to ensure that we only visit each minor index once
+            let mut prev = None;
+            let mut monotonic = true;
+
+            for &minor_idx in minor_idx_in_lane {
+                if minor_idx >= minor_dim {
+                    return Err(SparseFormatError::from_kind_and_msg(
+                        SparseFormatErrorKind::IndexOutOfBounds,
+                        "A minor index is out of bounds.",
+                    ));
+                }
+
+                if let Some(prev) = prev {
+                    if prev >= minor_idx {
+                        if !sort {
+                            return Err(SparseFormatError::from_kind_and_msg(
+                                SparseFormatErrorKind::InvalidStructure,
+                                "Minor indices are not strictly monotonically increasing in each lane.",
+                            ));
+                        }
+                        monotonic = false;
+                    }
+                }
+                prev = Some(minor_idx);
+            }
+
+            // Sort if indices are not monotonic and sorting is expected
+            if !monotonic && sort {
+                let range_size = range_end - range_start;
+                minor_index_permutation.resize(range_size, 0);
+                compute_sort_permutation(&mut minor_index_permutation, &minor_idx_in_lane);
+                minor_idx_buffer.clear();
+                minor_idx_buffer.extend_from_slice(&minor_idx_in_lane);
+                apply_permutation(
+                    &mut minor_indices[range_start..range_end],
+                    &minor_idx_buffer,
+                    &minor_index_permutation,
+                );
+
+                // Check for duplicates after sorting
+                prev = None;
+                for &minor_idx in &minor_indices[range_start..range_end] {
+                    if let Some(prev) = prev {
+                        if prev == minor_idx {
+                            return Err(SparseFormatError::from_kind_and_msg(
+                                SparseFormatErrorKind::DuplicateEntry,
+                                "Input data contains duplicate entries.",
+                            ));
+                        }
+                    }
+                    prev = Some(minor_idx);
+                }
+
+                // Sort values if they exist
+                if let Some(values) = values_option.as_mut() {
+                    values_buffer.clear();
+                    values_buffer.extend_from_slice(&values[range_start..range_end]);
+                    apply_permutation(
+                        &mut values[range_start..range_end],
+                        &values_buffer,
+                        &minor_index_permutation,
+                    );
+                }
+            }
+        }
+    }
+
+    Ok(())
+}
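
Note: the helper above enforces the usual compressed-sparse (CS) invariants. The standalone sketch below is illustrative only (not part of this commit, made-up numbers) and spells them out for a small 3x4 matrix in CSR-like terms, where the major dimension is the number of rows and the minor indices are column indices.

fn main() {
    // A 3x4 matrix with 5 explicit entries in CS layout.
    // major_offsets has major_dim + 1 entries, starts at 0 and ends at nnz;
    // each lane's minor indices must be in bounds and strictly increasing
    // (merely unsorted-but-unique is tolerated when `sort == true`).
    let major_dim = 3;
    let minor_dim = 4;
    let major_offsets = vec![0, 2, 3, 5]; // lanes cover [0..2), [2..3), [3..5)
    let minor_indices = vec![0, 3, 1, 0, 2]; // per lane: [0, 3], [1], [0, 2]
    let values = vec![1.0, 2.0, 3.0, 4.0, 5.0];

    assert_eq!(major_offsets.len(), major_dim + 1);
    assert_eq!(*major_offsets.last().unwrap(), minor_indices.len());
    assert_eq!(minor_indices.len(), values.len());
    assert!(minor_indices.iter().all(|&j| j < minor_dim));
}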

View File

@@ -6,6 +6,7 @@
 #[cfg(feature = "serde-serialize")]
 mod csc_serde;
 
+use crate::cs;
 use crate::cs::{CsLane, CsLaneIter, CsLaneIterMut, CsLaneMut, CsMatrix};
 use crate::csr::CsrMatrix;
 use crate::pattern::{SparsityPattern, SparsityPatternFormatError, SparsityPatternIter};
@@ -173,6 +174,50 @@ impl<T> CscMatrix<T> {
         Self::try_from_pattern_and_values(pattern, values)
     }
 
+    /// Try to construct a CSC matrix from raw CSC data with unsorted row indices.
+    ///
+    /// It is assumed that each column contains unique row indices that are in
+    /// bounds with respect to the number of rows in the matrix. If this is not the case,
+    /// an error is returned to indicate the failure.
+    ///
+    /// An error is returned if the data given does not conform to the CSC storage format
+    /// with the exception of having unsorted row indices and values.
+    /// See the documentation for [CscMatrix](struct.CscMatrix.html) for more information.
+    pub fn try_from_unsorted_csc_data(
+        num_rows: usize,
+        num_cols: usize,
+        col_offsets: Vec<usize>,
+        mut row_indices: Vec<usize>,
+        mut values: Vec<T>,
+    ) -> Result<Self, SparseFormatError>
+    where
+        T: Scalar,
+    {
+        let result = cs::validate_and_optionally_sort_cs_data(
+            num_cols,
+            num_rows,
+            &col_offsets,
+            &mut row_indices,
+            Some(&mut values),
+            true,
+        );
+
+        match result {
+            Ok(()) => {
+                let pattern = unsafe {
+                    SparsityPattern::from_offset_and_indices_unchecked(
+                        num_cols,
+                        num_rows,
+                        col_offsets,
+                        row_indices,
+                    )
+                };
+                Self::try_from_pattern_and_values(pattern, values)
+            }
+            Err(err) => Err(err),
+        }
+    }
+
     /// Try to construct a CSC matrix from a sparsity pattern and associated non-zero values.
     ///
     /// Returns an error if the number of values does not match the number of minor indices
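
For reference, a minimal usage sketch of the new `try_from_unsorted_csc_data` constructor. The matrix contents are illustrative and not taken from this commit; the sketch assumes the usual `nalgebra_sparse` public accessors `row_indices()` and `values()`.

use nalgebra_sparse::csc::CscMatrix;

fn main() {
    // A 3x2 matrix; within column 0 the row indices are given out of order.
    let col_offsets = vec![0, 2, 3];
    let row_indices = vec![2, 0, 1];
    let values = vec![3.0, 1.0, 2.0];

    let csc = CscMatrix::try_from_unsorted_csc_data(3, 2, col_offsets, row_indices, values)
        .expect("valid CSC data apart from the unsorted row indices");

    // After construction the row indices of each column are sorted,
    // and the values have been permuted along with them.
    assert_eq!(csc.row_indices(), &[0, 2, 1]);
    assert_eq!(csc.values(), &[1.0, 3.0, 2.0]);
}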

View File

@@ -6,6 +6,7 @@
 #[cfg(feature = "serde-serialize")]
 mod csr_serde;
 
+use crate::cs;
 use crate::cs::{CsLane, CsLaneIter, CsLaneIterMut, CsLaneMut, CsMatrix};
 use crate::csc::CscMatrix;
 use crate::pattern::{SparsityPattern, SparsityPatternFormatError, SparsityPatternIter};
@@ -13,7 +14,7 @@ use crate::{SparseEntry, SparseEntryMut, SparseFormatError, SparseFormatErrorKind};
 use nalgebra::Scalar;
 use num_traits::One;
 
-use std::iter::FromIterator;
 use std::slice::{Iter, IterMut};
 
 /// A CSR representation of a sparse matrix.
@@ -187,62 +188,35 @@ impl<T> CsrMatrix<T> {
         num_rows: usize,
         num_cols: usize,
         row_offsets: Vec<usize>,
-        col_indices: Vec<usize>,
-        values: Vec<T>,
+        mut col_indices: Vec<usize>,
+        mut values: Vec<T>,
     ) -> Result<Self, SparseFormatError>
     where
        T: Scalar,
     {
-        use SparsityPatternFormatError::*;
-        let count = col_indices.len();
-        let mut p: Vec<usize> = (0..count).collect();
-
-        if col_indices.len() != values.len() {
-            return Err(SparseFormatError::from_kind_and_msg(
-                SparseFormatErrorKind::InvalidStructure,
-                "Number of values and column indices must be the same",
-            ));
-        }
-
-        if row_offsets.len() == 0 {
-            return Err(SparseFormatError::from_kind_and_msg(
-                SparseFormatErrorKind::InvalidStructure,
-                "Number of offsets should be greater than 0",
-            ));
-        }
-
-        for (index, &offset) in row_offsets[0..row_offsets.len() - 1].iter().enumerate() {
-            let next_offset = row_offsets[index + 1];
-            if next_offset > count {
-                return Err(SparseFormatError::from_kind_and_msg(
-                    SparseFormatErrorKind::InvalidStructure,
-                    "No row offset should be greater than the number of column indices",
-                ));
-            }
-            if offset > next_offset {
-                return Err(NonmonotonicOffsets).map_err(pattern_format_error_to_csr_error);
-            }
-            p[offset..next_offset].sort_by(|a, b| {
-                let x = &col_indices[*a];
-                let y = &col_indices[*b];
-                x.partial_cmp(y).unwrap()
-            });
-        }
-
-        // permute indices
-        let sorted_col_indices: Vec<usize> =
-            Vec::from_iter((p.iter().map(|i| &col_indices[*i])).cloned());
-
-        // permute values
-        let sorted_values: Vec<T> = Vec::from_iter((p.iter().map(|i| &values[*i])).cloned());
-
-        return Self::try_from_csr_data(
-            num_rows,
-            num_cols,
-            row_offsets,
-            sorted_col_indices,
-            sorted_values,
-        );
+        let result = cs::validate_and_optionally_sort_cs_data(
+            num_rows,
+            num_cols,
+            &row_offsets,
+            &mut col_indices,
+            Some(&mut values),
+            true,
+        );
+
+        match result {
+            Ok(()) => {
+                let pattern = unsafe {
+                    SparsityPattern::from_offset_and_indices_unchecked(
+                        num_rows,
+                        num_cols,
+                        row_offsets,
+                        col_indices,
+                    )
+                };
+                Self::try_from_pattern_and_values(pattern, values)
+            }
+            Err(err) => Err(err),
+        }
     }
 
     /// Try to construct a CSR matrix from a sparsity pattern and associated non-zero values.
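
The refactored `try_from_unsorted_csr_data` keeps its public behaviour: unsorted column indices are sorted within each row and the values are permuted along with them. A brief illustrative sketch (data not taken from this commit):

use nalgebra_sparse::csr::CsrMatrix;

fn main() {
    // A 2x3 matrix; row 0 has its column indices given out of order.
    let row_offsets = vec![0, 2, 3];
    let col_indices = vec![2, 0, 1];
    let values = vec![3.0, 1.0, 2.0];

    let csr = CsrMatrix::try_from_unsorted_csr_data(2, 3, row_offsets, col_indices, values)
        .expect("valid CSR data apart from the unsorted column indices");

    // Column indices are sorted per row, with values permuted accordingly.
    assert_eq!(csr.col_indices(), &[0, 2, 1]);
    assert_eq!(csr.values(), &[1.0, 3.0, 2.0]);
}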

View File

@@ -160,6 +160,7 @@ pub mod ops;
 pub mod pattern;
 
 pub(crate) mod cs;
+pub(crate) mod utils;
 
 #[cfg(feature = "proptest-support")]
 pub mod proptest;

View File

@@ -188,6 +188,35 @@ impl SparsityPattern {
         })
     }
 
+    /// Try to construct a sparsity pattern from the given dimensions, major offsets
+    /// and minor indices.
+    ///
+    /// # Panics
+    ///
+    /// Panics if the number of major offsets is not exactly one greater than the major dimension
+    /// or if major offsets do not start with 0 and end with the number of minor indices.
+    pub unsafe fn from_offset_and_indices_unchecked(
+        major_dim: usize,
+        minor_dim: usize,
+        major_offsets: Vec<usize>,
+        minor_indices: Vec<usize>,
+    ) -> Self {
+        assert_eq!(major_offsets.len(), major_dim + 1);
+
+        // Check that the first and last offsets conform to the specification
+        {
+            let first_offset_ok = *major_offsets.first().unwrap() == 0;
+            let last_offset_ok = *major_offsets.last().unwrap() == minor_indices.len();
+            assert!(first_offset_ok && last_offset_ok);
+        }
+
+        Self {
+            major_offsets,
+            minor_indices,
+            minor_dim,
+        }
+    }
+
     /// An iterator over the explicitly stored "non-zero" entries (i, j).
     ///
     /// The iteration happens in a lane-major fashion, meaning that the lane index i
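
A sketch of the intended calling convention for the unchecked constructor (illustrative data, not from this commit). Callers are expected to have established sorted, unique, in-bounds minor indices beforehand, which is how the new CSC/CSR code paths above use it after validation:

use nalgebra_sparse::pattern::SparsityPattern;

fn main() {
    let major_offsets = vec![0, 2, 3, 5];
    let minor_indices = vec![0, 3, 1, 0, 2];

    // Offsets have length major_dim + 1, start at 0, end at nnz, and each
    // lane's minor indices are strictly increasing and less than minor_dim.
    let pattern = unsafe {
        SparsityPattern::from_offset_and_indices_unchecked(3, 4, major_offsets, minor_indices)
    };
    assert_eq!(pattern.nnz(), 5);
}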

View File

@@ -0,0 +1,26 @@
+//! Helper functions for sparse matrix computations
+
+/// Permutes the entries of `in_slice` according to `permutation` and writes them into `out_slice`.
+#[inline]
+pub fn apply_permutation<T: Clone>(out_slice: &mut [T], in_slice: &[T], permutation: &[usize]) {
+    assert_eq!(out_slice.len(), in_slice.len());
+    assert_eq!(out_slice.len(), permutation.len());
+    for (out_element, old_pos) in out_slice.iter_mut().zip(permutation) {
+        *out_element = in_slice[*old_pos].clone();
+    }
+}
+
+/// Computes the permutation that sorts the provided indices when they are used as sort keys.
+#[inline]
+pub fn compute_sort_permutation(permutation: &mut [usize], indices: &[usize]) {
+    assert_eq!(permutation.len(), indices.len());
+
+    // Set permutation to identity
+    for (i, p) in permutation.iter_mut().enumerate() {
+        *p = i;
+    }
+
+    // Compute permutation needed to bring minor indices into sorted order
+    // Note: Using sort_unstable here avoids internal allocations, which is crucial since
+    // each lane might have a small number of elements
+    permutation.sort_unstable_by_key(|idx| indices[*idx]);
+}
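
The two helpers compose into a small indirect sort. The hypothetical unit test below (not part of this commit) sketches that contract, assuming it sits in the same module as the helpers:

#[cfg(test)]
mod tests {
    use super::{apply_permutation, compute_sort_permutation};

    #[test]
    fn sort_permutation_roundtrip() {
        let keys = [30usize, 10, 20];
        let values = ["c", "a", "b"];

        // Permutation that brings `keys` into sorted order: keys[1] <= keys[2] <= keys[0].
        let mut permutation = vec![0; keys.len()];
        compute_sort_permutation(&mut permutation, &keys);
        assert_eq!(permutation, vec![1, 2, 0]);

        // Applying the same permutation to the associated values sorts them by key.
        let mut sorted_values = vec![""; values.len()];
        apply_permutation(&mut sorted_values, &values, &permutation);
        assert_eq!(sorted_values, vec!["a", "b", "c"]);
    }
}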

View File

@@ -5,6 +5,8 @@ use nalgebra_sparse::{SparseEntry, SparseEntryMut, SparseFormatErrorKind};
 use proptest::prelude::*;
 use proptest::sample::subsequence;
 
+use super::test_data_examples::{InvalidCsDataExamples, ValidCsDataExamples};
+
 use crate::assert_panics;
 use crate::common::csc_strategy;
 
@@ -171,11 +173,26 @@ fn csc_matrix_valid_data() {
     }
 }
 
+#[test]
+fn csc_matrix_valid_data_unsorted_column_indices() {
+    let valid_data: ValidCsDataExamples = ValidCsDataExamples::new();
+
+    let (offsets, indices, values) = valid_data.valid_unsorted_cs_data;
+    let csc = CscMatrix::try_from_unsorted_csc_data(5, 4, offsets, indices, values).unwrap();
+
+    let (offsets2, indices2, values2) = valid_data.valid_cs_data;
+    let expected_csc = CscMatrix::try_from_csc_data(5, 4, offsets2, indices2, values2).unwrap();
+
+    assert_eq!(csc, expected_csc);
+}
+
 #[test]
 fn csc_matrix_try_from_invalid_csc_data() {
+    let invalid_data: InvalidCsDataExamples = InvalidCsDataExamples::new();
     {
         // Empty offset array (invalid length)
-        let matrix = CscMatrix::try_from_csc_data(0, 0, Vec::new(), Vec::new(), Vec::<u32>::new());
+        let (offsets, indices, values) = invalid_data.empty_offset_array;
+        let matrix = CscMatrix::try_from_csc_data(0, 0, offsets, indices, values);
         assert_eq!(
             matrix.unwrap_err().kind(),
             &SparseFormatErrorKind::InvalidStructure
@@ -184,10 +201,8 @@ fn csc_matrix_try_from_invalid_csc_data() {
 
     {
         // Offset array invalid length for arbitrary data
-        let offsets = vec![0, 3, 5];
-        let indices = vec![0, 1, 2, 3, 5];
-        let values = vec![0, 1, 2, 3, 4];
+        let (offsets, indices, values) =
+            invalid_data.offset_array_invalid_length_for_arbitrary_data;
         let matrix = CscMatrix::try_from_csc_data(6, 3, offsets, indices, values);
         assert_eq!(
             matrix.unwrap_err().kind(),
@@ -197,9 +212,7 @@ fn csc_matrix_try_from_invalid_csc_data() {
 
     {
         // Invalid first entry in offsets array
-        let offsets = vec![1, 2, 2, 5];
-        let indices = vec![0, 5, 1, 2, 3];
-        let values = vec![0, 1, 2, 3, 4];
+        let (offsets, indices, values) = invalid_data.invalid_first_entry_in_offsets_array;
         let matrix = CscMatrix::try_from_csc_data(6, 3, offsets, indices, values);
         assert_eq!(
             matrix.unwrap_err().kind(),
@@ -209,9 +222,7 @@ fn csc_matrix_try_from_invalid_csc_data() {
 
     {
         // Invalid last entry in offsets array
-        let offsets = vec![0, 2, 2, 4];
-        let indices = vec![0, 5, 1, 2, 3];
-        let values = vec![0, 1, 2, 3, 4];
+        let (offsets, indices, values) = invalid_data.invalid_last_entry_in_offsets_array;
        let matrix = CscMatrix::try_from_csc_data(6, 3, offsets, indices, values);
         assert_eq!(
             matrix.unwrap_err().kind(),
@@ -221,9 +232,7 @@ fn csc_matrix_try_from_invalid_csc_data() {
 
     {
         // Invalid length of offsets array
-        let offsets = vec![0, 2, 2];
-        let indices = vec![0, 5, 1, 2, 3];
-        let values = vec![0, 1, 2, 3, 4];
+        let (offsets, indices, values) = invalid_data.invalid_length_of_offsets_array;
         let matrix = CscMatrix::try_from_csc_data(6, 3, offsets, indices, values);
         assert_eq!(
             matrix.unwrap_err().kind(),
@@ -233,9 +242,7 @@ fn csc_matrix_try_from_invalid_csc_data() {
 
     {
         // Nonmonotonic offsets
-        let offsets = vec![0, 3, 2, 5];
-        let indices = vec![0, 1, 2, 3, 4];
-        let values = vec![0, 1, 2, 3, 4];
+        let (offsets, indices, values) = invalid_data.nonmonotonic_offsets;
         let matrix = CscMatrix::try_from_csc_data(6, 3, offsets, indices, values);
         assert_eq!(
             matrix.unwrap_err().kind(),
@@ -257,9 +264,7 @@ fn csc_matrix_try_from_invalid_csc_data() {
 
     {
         // Minor index out of bounds
-        let offsets = vec![0, 2, 2, 5];
-        let indices = vec![0, 6, 1, 2, 3];
-        let values = vec![0, 1, 2, 3, 4];
+        let (offsets, indices, values) = invalid_data.minor_index_out_of_bounds;
         let matrix = CscMatrix::try_from_csc_data(6, 3, offsets, indices, values);
         assert_eq!(
             matrix.unwrap_err().kind(),
@@ -269,9 +274,7 @@ fn csc_matrix_try_from_invalid_csc_data() {
 
     {
         // Duplicate entry
-        let offsets = vec![0, 2, 2, 5];
-        let indices = vec![0, 5, 2, 2, 3];
-        let values = vec![0, 1, 2, 3, 4];
+        let (offsets, indices, values) = invalid_data.duplicate_entry;
         let matrix = CscMatrix::try_from_csc_data(6, 3, offsets, indices, values);
         assert_eq!(
             matrix.unwrap_err().kind(),
@@ -280,6 +283,121 @@ fn csc_matrix_try_from_invalid_csc_data() {
     }
 }
 
+#[test]
+fn csc_matrix_try_from_unsorted_invalid_csc_data() {
+    let invalid_data: InvalidCsDataExamples = InvalidCsDataExamples::new();
+    {
+        // Empty offset array (invalid length)
+        let (offsets, indices, values) = invalid_data.empty_offset_array;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(0, 0, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::InvalidStructure
+        );
+    }
+
+    {
+        // Offset array invalid length for arbitrary data
+        let (offsets, indices, values) =
+            invalid_data.offset_array_invalid_length_for_arbitrary_data;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::InvalidStructure
+        );
+    }
+
+    {
+        // Invalid first entry in offsets array
+        let (offsets, indices, values) = invalid_data.invalid_first_entry_in_offsets_array;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::InvalidStructure
+        );
+    }
+
+    {
+        // Invalid last entry in offsets array
+        let (offsets, indices, values) = invalid_data.invalid_last_entry_in_offsets_array;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::InvalidStructure
+        );
+    }
+
+    {
+        // Invalid length of offsets array
+        let (offsets, indices, values) = invalid_data.invalid_length_of_offsets_array;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::InvalidStructure
+        );
+    }
+
+    {
+        // Nonmonotonic offsets
+        let (offsets, indices, values) = invalid_data.nonmonotonic_offsets;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::InvalidStructure
+        );
+    }
+
+    {
+        // Major offset out of bounds
+        let (offsets, indices, values) = invalid_data.major_offset_out_of_bounds;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::IndexOutOfBounds
+        );
+    }
+
+    {
+        // Minor index out of bounds
+        let (offsets, indices, values) = invalid_data.minor_index_out_of_bounds;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::IndexOutOfBounds
+        );
+    }
+
+    {
+        // Duplicate entry
+        let (offsets, indices, values) = invalid_data.duplicate_entry;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::DuplicateEntry
+        );
+    }
+
+    {
+        // Duplicate entry in unsorted lane
+        let (offsets, indices, values) = invalid_data.duplicate_entry_unsorted;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::DuplicateEntry
+        );
+    }
+
+    {
+        // Wrong values length
+        let (offsets, indices, values) = invalid_data.wrong_values_length;
+        let matrix = CscMatrix::try_from_unsorted_csc_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::InvalidStructure
+        );
+    }
+}
+
 #[test]
 fn csc_disassemble_avoids_clone_when_owned() {
     // Test that disassemble avoids cloning the sparsity pattern when it holds the sole reference

View File

@@ -5,7 +5,7 @@ use nalgebra_sparse::{SparseEntry, SparseEntryMut, SparseFormatErrorKind};
 use proptest::prelude::*;
 use proptest::sample::subsequence;
 
-use super::test_data_examples::InvalidCsrDataExamples;
+use super::test_data_examples::{InvalidCsDataExamples, ValidCsDataExamples};
 
 use crate::assert_panics;
 use crate::common::csr_strategy;
 
@@ -175,30 +175,20 @@ fn csr_matrix_valid_data() {
 
 #[test]
 fn csr_matrix_valid_data_unsorted_column_indices() {
-    let csr = CsrMatrix::try_from_unsorted_csr_data(
-        4,
-        5,
-        vec![0, 3, 5, 8, 11],
-        vec![4, 1, 3, 3, 1, 2, 3, 0, 3, 4, 1],
-        vec![5, 1, 4, 7, 4, 2, 3, 1, 8, 9, 6],
-    )
-    .unwrap();
+    let valid_data: ValidCsDataExamples = ValidCsDataExamples::new();
 
-    let expected_csr = CsrMatrix::try_from_csr_data(
-        4,
-        5,
-        vec![0, 3, 5, 8, 11],
-        vec![1, 3, 4, 1, 3, 0, 2, 3, 1, 3, 4],
-        vec![1, 4, 5, 4, 7, 1, 2, 3, 6, 8, 9],
-    )
-    .unwrap();
+    let (offsets, indices, values) = valid_data.valid_unsorted_cs_data;
+    let csr = CsrMatrix::try_from_unsorted_csr_data(4, 5, offsets, indices, values).unwrap();
+
+    let (offsets2, indices2, values2) = valid_data.valid_cs_data;
+    let expected_csr = CsrMatrix::try_from_csr_data(4, 5, offsets2, indices2, values2).unwrap();
 
     assert_eq!(csr, expected_csr);
 }
 
 #[test]
 fn csr_matrix_try_from_invalid_csr_data() {
-    let invalid_data: InvalidCsrDataExamples = InvalidCsrDataExamples::new();
+    let invalid_data: InvalidCsDataExamples = InvalidCsDataExamples::new();
     {
         // Empty offset array (invalid length)
         let (offsets, indices, values) = invalid_data.empty_offset_array;
@@ -293,7 +283,7 @@ fn csr_matrix_try_from_invalid_csr_data() {
 
 #[test]
 fn csr_matrix_try_from_unsorted_invalid_csr_data() {
-    let invalid_data: InvalidCsrDataExamples = InvalidCsrDataExamples::new();
+    let invalid_data: InvalidCsDataExamples = InvalidCsDataExamples::new();
     {
         // Empty offset array (invalid length)
         let (offsets, indices, values) = invalid_data.empty_offset_array;
@@ -355,6 +345,16 @@ fn csr_matrix_try_from_unsorted_invalid_csr_data() {
         );
     }
 
+    {
+        // Major offset out of bounds
+        let (offsets, indices, values) = invalid_data.major_offset_out_of_bounds;
+        let matrix = CsrMatrix::try_from_unsorted_csr_data(3, 6, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::IndexOutOfBounds
+        );
+    }
+
     {
         // Minor index out of bounds
         let (offsets, indices, values) = invalid_data.minor_index_out_of_bounds;
@@ -374,6 +374,26 @@ fn csr_matrix_try_from_unsorted_invalid_csr_data() {
             &SparseFormatErrorKind::DuplicateEntry
         );
     }
+
+    {
+        // Duplicate entry in unsorted lane
+        let (offsets, indices, values) = invalid_data.duplicate_entry_unsorted;
+        let matrix = CsrMatrix::try_from_unsorted_csr_data(3, 6, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::DuplicateEntry
+        );
+    }
+
+    {
+        // Wrong values length
+        let (offsets, indices, values) = invalid_data.wrong_values_length;
+        let matrix = CsrMatrix::try_from_unsorted_csr_data(6, 3, offsets, indices, values);
+        assert_eq!(
+            matrix.unwrap_err().kind(),
+            &SparseFormatErrorKind::InvalidStructure
+        );
+    }
 }
 
 #[test]

View File

@ -1,5 +1,31 @@
/// Examples of *invalid* raw CSR data `(offsets, indices, values)`. /// Examples of *valid* raw CS data `(offsets, indices, values)`.
pub struct InvalidCsrDataExamples { pub struct ValidCsDataExamples {
pub valid_cs_data: (Vec<usize>, Vec<usize>, Vec<i32>),
pub valid_unsorted_cs_data: (Vec<usize>, Vec<usize>, Vec<i32>),
}
impl ValidCsDataExamples {
pub fn new() -> Self {
let valid_cs_data = (
vec![0, 3, 5, 8, 11],
vec![1, 3, 4, 1, 3, 0, 2, 3, 1, 3, 4],
vec![1, 4, 5, 4, 7, 1, 2, 3, 6, 8, 9],
);
let valid_unsorted_cs_data = (
vec![0, 3, 5, 8, 11],
vec![4, 1, 3, 3, 1, 2, 3, 0, 3, 4, 1],
vec![5, 1, 4, 7, 4, 2, 3, 1, 8, 9, 6],
);
return Self {
valid_cs_data,
valid_unsorted_cs_data,
};
}
}
/// Examples of *invalid* raw CS data `(offsets, indices, values)`.
pub struct InvalidCsDataExamples {
pub empty_offset_array: (Vec<usize>, Vec<usize>, Vec<i32>), pub empty_offset_array: (Vec<usize>, Vec<usize>, Vec<i32>),
pub offset_array_invalid_length_for_arbitrary_data: (Vec<usize>, Vec<usize>, Vec<i32>), pub offset_array_invalid_length_for_arbitrary_data: (Vec<usize>, Vec<usize>, Vec<i32>),
pub invalid_first_entry_in_offsets_array: (Vec<usize>, Vec<usize>, Vec<i32>), pub invalid_first_entry_in_offsets_array: (Vec<usize>, Vec<usize>, Vec<i32>),
@ -7,11 +33,14 @@ pub struct InvalidCsrDataExamples {
pub invalid_length_of_offsets_array: (Vec<usize>, Vec<usize>, Vec<i32>), pub invalid_length_of_offsets_array: (Vec<usize>, Vec<usize>, Vec<i32>),
pub nonmonotonic_offsets: (Vec<usize>, Vec<usize>, Vec<i32>), pub nonmonotonic_offsets: (Vec<usize>, Vec<usize>, Vec<i32>),
pub nonmonotonic_minor_indices: (Vec<usize>, Vec<usize>, Vec<i32>), pub nonmonotonic_minor_indices: (Vec<usize>, Vec<usize>, Vec<i32>),
pub major_offset_out_of_bounds: (Vec<usize>, Vec<usize>, Vec<i32>),
pub minor_index_out_of_bounds: (Vec<usize>, Vec<usize>, Vec<i32>), pub minor_index_out_of_bounds: (Vec<usize>, Vec<usize>, Vec<i32>),
pub duplicate_entry: (Vec<usize>, Vec<usize>, Vec<i32>), pub duplicate_entry: (Vec<usize>, Vec<usize>, Vec<i32>),
pub duplicate_entry_unsorted: (Vec<usize>, Vec<usize>, Vec<i32>),
pub wrong_values_length: (Vec<usize>, Vec<usize>, Vec<i32>),
} }
impl InvalidCsrDataExamples { impl InvalidCsDataExamples {
pub fn new() -> Self { pub fn new() -> Self {
let empty_offset_array = (Vec::<usize>::new(), Vec::<usize>::new(), Vec::<i32>::new()); let empty_offset_array = (Vec::<usize>::new(), Vec::<usize>::new(), Vec::<i32>::new());
let offset_array_invalid_length_for_arbitrary_data = let offset_array_invalid_length_for_arbitrary_data =
@ -25,9 +54,13 @@ impl InvalidCsrDataExamples {
let nonmonotonic_offsets = (vec![0, 3, 2, 5], vec![0, 1, 2, 3, 4], vec![0, 1, 2, 3, 4]); let nonmonotonic_offsets = (vec![0, 3, 2, 5], vec![0, 1, 2, 3, 4], vec![0, 1, 2, 3, 4]);
let nonmonotonic_minor_indices = let nonmonotonic_minor_indices =
(vec![0, 2, 2, 5], vec![0, 2, 3, 1, 4], vec![0, 1, 2, 3, 4]); (vec![0, 2, 2, 5], vec![0, 2, 3, 1, 4], vec![0, 1, 2, 3, 4]);
let major_offset_out_of_bounds =
(vec![0, 7, 2, 5], vec![0, 2, 3, 1, 4], vec![0, 1, 2, 3, 4]);
let minor_index_out_of_bounds = let minor_index_out_of_bounds =
(vec![0, 2, 2, 5], vec![0, 6, 1, 2, 3], vec![0, 1, 2, 3, 4]); (vec![0, 2, 2, 5], vec![0, 6, 1, 2, 3], vec![0, 1, 2, 3, 4]);
let duplicate_entry = (vec![0, 2, 2, 5], vec![0, 5, 2, 2, 3], vec![0, 1, 2, 3, 4]); let duplicate_entry = (vec![0, 1, 2, 5], vec![1, 3, 2, 3, 3], vec![0, 1, 2, 3, 4]);
let duplicate_entry_unsorted = (vec![0, 1, 4, 5], vec![1, 3, 2, 3, 3], vec![0, 1, 2, 3, 4]);
let wrong_values_length = (vec![0, 1, 2, 5], vec![1, 3, 2, 3, 0], vec![5, 4]);
return Self { return Self {
empty_offset_array, empty_offset_array,
@ -37,8 +70,11 @@ impl InvalidCsrDataExamples {
invalid_length_of_offsets_array, invalid_length_of_offsets_array,
nonmonotonic_minor_indices, nonmonotonic_minor_indices,
nonmonotonic_offsets, nonmonotonic_offsets,
major_offset_out_of_bounds,
minor_index_out_of_bounds, minor_index_out_of_bounds,
duplicate_entry, duplicate_entry,
duplicate_entry_unsorted,
wrong_values_length,
}; };
} }
} }

View File

@@ -98,7 +98,7 @@ where
             matrix,
             compute_u,
             compute_v,
-            T::RealField::default_epsilon(),
+            T::RealField::default_epsilon() * crate::convert(5.0),
             0,
         )
         .unwrap()
@@ -888,13 +888,13 @@ fn compute_2x2_uptrig_svd<T: RealField>(
         v_t = Some(csv.clone());
     }
 
-    if compute_u {
-        let cu = (m11.scale(csv.c()) + m12 * csv.s()) / v1.clone();
-        let su = (m22 * csv.s()) / v1.clone();
-        let (csu, sgn_u) = GivensRotation::new(cu, su);
+    let cu = (m11.scale(csv.c()) + m12 * csv.s()) / v1.clone();
+    let su = (m22 * csv.s()) / v1.clone();
+    let (csu, sgn_u) = GivensRotation::new(cu, su);
 
-        v1 *= sgn_u.clone();
-        v2 *= sgn_u;
+    v1 *= sgn_u.clone();
+    v2 *= sgn_u;
 
+    if compute_u {
         u = Some(csu);
     }
 }

View File

@@ -460,3 +460,42 @@ fn svd_sorted() {
         epsilon = 1.0e-5
     );
 }
+
+#[test]
+// Exercises bug reported in issue #983 of nalgebra (https://github.com/dimforge/nalgebra/issues/983)
+fn svd_regression_issue_983() {
+    let m = nalgebra::dmatrix![
+        10.74785316637712f64, -5.994983325167452, -6.064492921857296;
+        -4.149751381521569, 20.654504205822462, -4.470436210703133;
+        -22.772715014220207, -1.4554372570788008, 18.108113992170573
+    ]
+    .transpose();
+
+    let svd1 = m.clone().svd(true, true);
+    let svd2 = m.clone().svd(false, true);
+    let svd3 = m.clone().svd(true, false);
+    let svd4 = m.svd(false, false);
+
+    assert_relative_eq!(svd1.singular_values, svd2.singular_values, epsilon = 1e-9);
+    assert_relative_eq!(svd1.singular_values, svd3.singular_values, epsilon = 1e-9);
+    assert_relative_eq!(svd1.singular_values, svd4.singular_values, epsilon = 1e-9);
+    assert_relative_eq!(
+        svd1.singular_values,
+        nalgebra::dvector![3.16188022e+01, 2.23811978e+01, 0.],
+        epsilon = 1e-6
+    );
+}
+
+#[test]
+// Exercises bug reported in issue #1072 of nalgebra (https://github.com/dimforge/nalgebra/issues/1072)
+fn svd_regression_issue_1072() {
+    let x = nalgebra::dmatrix![-6.206610118536945f64, -3.67612186839874; -1.2755730783423473, 6.047238193479124];
+    let mut x_svd = x.svd(true, true);
+    x_svd.singular_values = nalgebra::dvector![1.0, 0.0];
+    let y = x_svd.recompose().unwrap();
+    let y_svd = y.svd(true, true);
+    assert_relative_eq!(
+        y_svd.singular_values,
+        nalgebra::dvector![1.0, 0.0],
+        epsilon = 1e-9
+    );
+}