Merge pull request #997 from rust-ndarray/move-into-fixes
move_into: Split into methods `.move_into()` and `.move_into_uninit()`.
bluss committed May 14, 2021
2 parents cbf12d2 + dd49b77 commit 74c7994
Showing 3 changed files with 129 additions and 17 deletions.
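For orientation, here is a minimal sketch of the split API as exercised by this commit's own doc examples and tests: `.move_into()` is the safe entry point for an initialized destination, while `.move_into_uninit()` targets a `MaybeUninit<A>` destination and is paired with `assume_init()`. The sketch below only recombines those examples; it is illustrative, not part of the diff.

```rust
use ndarray::Array;

// Safe variant: destination is a regular, initialized array of the same shape.
let src = Array::from_shape_fn((10, 10), |(i, j)| (i + j).to_string());
let mut dst = Array::default((10, 10));
src.move_into(&mut dst);

// Uninit variant: destination holds MaybeUninit<A>; promise initialization afterwards.
let src = Array::from_iter(0..100).into_shape((10, 10)).unwrap();
let mut dst = Array::uninit((10, 10));
src.move_into_uninit(&mut dst);
let dst = unsafe { dst.assume_init() };
```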
73 changes: 67 additions & 6 deletions src/impl_owned_array.rs
@@ -1,5 +1,6 @@

use alloc::vec::Vec;
use std::mem;
use std::mem::MaybeUninit;

use rawpointer::PointerExt;
@@ -33,7 +34,7 @@ impl<A> Array<A, Ix0> {
/// assert_eq!(scalar, Foo);
/// ```
pub fn into_scalar(self) -> A {
let size = ::std::mem::size_of::<A>();
let size = mem::size_of::<A>();
if size == 0 {
// Any index in the `Vec` is fine since all elements are identical.
self.data.into_vec().remove(0)
@@ -155,6 +156,51 @@ impl<A> Array<A, Ix2> {
impl<A, D> Array<A, D>
where D: Dimension
{
/// Move all elements from self into `new_array`, which must be of the same shape but
/// can have a different memory layout. The destination is overwritten completely.
///
/// The destination should be a mut reference to an array or an `ArrayViewMut` with
/// `A` elements.
///
/// ***Panics*** if the shapes don't agree.
///
/// ## Example
///
/// ```
/// use ndarray::Array;
///
/// // Usage example of move_into in safe code
/// let mut a = Array::default((10, 10));
/// let b = Array::from_shape_fn((10, 10), |(i, j)| (i + j).to_string());
/// b.move_into(&mut a);
/// ```
pub fn move_into<'a, AM>(self, new_array: AM)
where
AM: Into<ArrayViewMut<'a, A, D>>,
A: 'a,
{
// Remove generic parameter AM and call the implementation
let new_array = new_array.into();
if mem::needs_drop::<A>() {
self.move_into_needs_drop(new_array);
} else {
// If `A` doesn't need drop, we can overwrite the destination.
// Safe because: move_into_uninit only writes initialized values
unsafe {
self.move_into_uninit(new_array.into_maybe_uninit())
}
}
}
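Since the destination only has to convert into an `ArrayViewMut`, `move_into` can also target part of a larger array. A hedged sketch; the shapes and the `slice_mut` call are illustrative choices, not taken from the diff:

```rust
use ndarray::{Array, s};

// Move a 10×10 array of Strings into the top-left corner of a larger array.
// The view's shape must match the source exactly, or move_into panics.
let small = Array::from_shape_fn((10, 10), |(i, j)| format!("{},{}", i, j));
let mut big = Array::default((20, 20));
small.move_into(big.slice_mut(s![..10, ..10]));
assert_eq!(big[(0, 0)], "0,0");
```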

fn move_into_needs_drop(mut self, new_array: ArrayViewMut<A, D>) {
// Simple case where `A` has a destructor: just swap values between self and new_array.
// Afterwards, `self` is dropped while still full of initialized values, so dropping works as usual.
// This avoids moving out of owned values in `self` while at the same time managing
// the dropping of the values being overwritten in `new_array`.
Zip::from(&mut self).and(new_array)
.for_each(|src, dst| mem::swap(src, dst));
}
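In other words, the strategy for types that need drop is a plain element-wise swap: after the loop the destination holds the moved values, and `self` holds the destination's old values, which are dropped normally when `self` goes out of scope. A small standalone illustration of that principle using the same `Zip`/`mem::swap` combination (a sketch, not library code):

```rust
use std::mem;
use ndarray::{Array, Zip};

let mut src = Array::from_shape_fn((2, 2), |(i, j)| format!("src {} {}", i, j));
let mut dst = Array::from_shape_fn((2, 2), |(i, j)| format!("dst {} {}", i, j));

// Swap element-wise: dst receives src's values, src receives dst's old values.
Zip::from(&mut src).and(&mut dst).for_each(|s, d| mem::swap(s, d));

assert_eq!(dst[(0, 0)], "src 0 0");
assert_eq!(src[(1, 1)], "dst 1 1");
// Dropping `src` now runs the destructors of dst's original Strings.
```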

/// Move all elements from self into `new_array`, which must be of the same shape but
/// can have a different memory layout. The destination is overwritten completely.
///
@@ -167,20 +213,35 @@ impl<A, D> Array<A, D>
/// drop of any such element, other elements may be leaked.
///
/// ***Panics*** if the shapes don't agree.
pub fn move_into<'a, AM>(self, new_array: AM)
///
/// ## Example
///
/// ```
/// use ndarray::Array;
///
/// let a = Array::from_iter(0..100).into_shape((10, 10)).unwrap();
/// let mut b = Array::uninit((10, 10));
/// a.move_into_uninit(&mut b);
/// unsafe {
/// // we can now promise we have fully initialized `b`.
/// let b = b.assume_init();
/// }
/// ```
pub fn move_into_uninit<'a, AM>(self, new_array: AM)
where
AM: Into<ArrayViewMut<'a, MaybeUninit<A>, D>>,
A: 'a,
{
// Remove generic parameter P and call the implementation
// Remove generic parameter AM and call the implementation
self.move_into_impl(new_array.into())
}

fn move_into_impl(mut self, new_array: ArrayViewMut<MaybeUninit<A>, D>) {
unsafe {
// Safety: copy_to_nonoverlapping cannot panic
let guard = AbortIfPanic(&"move_into: moving out of owned value");
// Move all reachable elements
// Move all reachable elements; we move elements out of `self`,
// and thus must not panic for the whole section until we call `self.data.set_len(0)`.
Zip::from(self.raw_view_mut())
.and(new_array)
.for_each(|src, dst| {
@@ -208,7 +269,7 @@ impl<A, D> Array<A, D>
let data_len = self.data.len();

let has_unreachable_elements = self_len != data_len;
if !has_unreachable_elements || std::mem::size_of::<A>() == 0 {
if !has_unreachable_elements || mem::size_of::<A>() == 0 || !mem::needs_drop::<A>() {
unsafe {
self.data.set_len(0);
}
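For context on the `has_unreachable_elements` check: slicing an owned array in place can leave elements in the backing allocation that are no longer reachable through the array's shape and strides, and on this move-out path those leftovers still have to be accounted for and dropped. A hedged illustration of how such holes arise, using the same shape and slice as the new `move_into` test below:

```rust
use ndarray::{Array2, s};

// 5×4 = 20 Strings are allocated, but after slice_collapse only 3×2 = 6 of
// them are reachable through the array; the other 14 are "holes" that the
// move-out code must still account for when dropping.
let mut a = Array2::from_shape_fn((5, 4), |idx| format!("{:?}", idx));
a.slice_collapse(s![1..-1, ..;2]);
assert_eq!(a.len(), 6);
```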
@@ -270,7 +331,7 @@ impl<A, D> Array<A, D>
// dummy array -> self.
// old_self elements are moved -> new_array.
let old_self = std::mem::replace(self, Self::empty());
old_self.move_into(new_array.view_mut());
old_self.move_into_uninit(new_array.view_mut());

// new_array -> self.
unsafe {
22 changes: 22 additions & 0 deletions src/impl_views/conversions.rs
@@ -7,6 +7,7 @@
// except according to those terms.

use alloc::slice;
use std::mem::MaybeUninit;

use crate::imp_prelude::*;

@@ -133,6 +134,27 @@ where
self.into_raw_view_mut().cast::<MathCell<A>>().deref_into_view()
}
}

/// Return the array view as a view of `MaybeUninit<A>` elements
///
/// This conversion leaves the elements as they were (presumably initialized), but
/// they are represented with the `MaybeUninit<A>` type. Effectively this means that
/// the elements can be overwritten without dropping the old element in its place.
/// (In some situations this is not what you want, while for `Copy` elements it makes
/// no difference at all.)
///
/// # Safety
///
/// This method allows writing uninitialized data into the view, which could leave any
/// original array that we borrow from in an inconsistent state. This is not allowed
/// when using the resulting array view.
pub(crate) unsafe fn into_maybe_uninit(self) -> ArrayViewMut<'a, MaybeUninit<A>, D> {
// Safe because: A and MaybeUninit<A> have the same representation;
// and going from initialized to maybe-uninitialized is always valid in terms of
// representation. However, the user must be careful not to write uninit elements
// through the view.
self.into_raw_view_mut().cast::<MaybeUninit<A>>().deref_into_view_mut()
}
}
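The `MaybeUninit<A>` view has two consequences worth spelling out: overwriting an element no longer drops the old value, and nothing stops uninitialized data from being written through it, which is why the method is `unsafe`. A minimal std-only illustration of the first, element-level point (not from the diff):

```rust
use std::mem::MaybeUninit;

let mut slot = MaybeUninit::new(String::from("old"));
// Writing through the MaybeUninit does not drop "old"; it is simply leaked.
unsafe { slot.as_mut_ptr().write(String::from("new")) };
let value = unsafe { slot.assume_init() };
assert_eq!(value, "new");
```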

/// Private array view methods
51 changes: 40 additions & 11 deletions tests/assign.rs
@@ -41,14 +41,14 @@ fn move_into_copy() {
let a = arr2(&[[1., 2.], [3., 4.]]);
let acopy = a.clone();
let mut b = Array::uninit(a.dim());
a.move_into(b.view_mut());
a.move_into_uninit(b.view_mut());
let b = unsafe { b.assume_init() };
assert_eq!(acopy, b);

let a = arr2(&[[1., 2.], [3., 4.]]).reversed_axes();
let acopy = a.clone();
let mut b = Array::uninit(a.dim());
a.move_into(b.view_mut());
a.move_into_uninit(b.view_mut());
let b = unsafe { b.assume_init() };
assert_eq!(acopy, b);
}
@@ -74,7 +74,7 @@ fn move_into_owned() {

let acopy = a.clone();
let mut b = Array::uninit(a.dim());
a.move_into(b.view_mut());
a.move_into_uninit(b.view_mut());
let b = unsafe { b.assume_init() };

assert_eq!(acopy, b);
@@ -85,7 +85,7 @@

#[test]
fn move_into_slicing() {
// Count correct number of drops when using move_into and discontiguous arrays (with holes).
// Count correct number of drops when using move_into_uninit and discontiguous arrays (with holes).
for &use_f_order in &[false, true] {
for &invert_axis in &[0b00, 0b01, 0b10, 0b11] { // bitmask for axis to invert
let counter = DropCounter::default();
@@ -102,7 +102,7 @@
}

let mut b = Array::uninit(a.dim());
a.move_into(b.view_mut());
a.move_into_uninit(b.view_mut());
let b = unsafe { b.assume_init() };

let total = m * n;
@@ -118,7 +118,7 @@

#[test]
fn move_into_diag() {
// Count correct number of drops when using move_into and discontiguous arrays (with holes).
// Count correct number of drops when using move_into_uninit and discontiguous arrays (with holes).
for &use_f_order in &[false, true] {
let counter = DropCounter::default();
{
@@ -128,7 +128,7 @@
let a = a.into_diag();

let mut b = Array::uninit(a.dim());
a.move_into(b.view_mut());
a.move_into_uninit(b.view_mut());
let b = unsafe { b.assume_init() };

let total = m * n;
@@ -143,7 +143,7 @@

#[test]
fn move_into_0dim() {
// Count correct number of drops when using move_into and discontiguous arrays (with holes).
// Count correct number of drops when using move_into_uninit and discontiguous arrays (with holes).
for &use_f_order in &[false, true] {
let counter = DropCounter::default();
{
@@ -155,7 +155,7 @@

assert_eq!(a.ndim(), 0);
let mut b = Array::uninit(a.dim());
a.move_into(b.view_mut());
a.move_into_uninit(b.view_mut());
let b = unsafe { b.assume_init() };

let total = m * n;
@@ -170,7 +170,7 @@

#[test]
fn move_into_empty() {
// Count correct number of drops when using move_into and discontiguous arrays (with holes).
// Count correct number of drops when using move_into_uninit and discontiguous arrays (with holes).
for &use_f_order in &[false, true] {
let counter = DropCounter::default();
{
@@ -181,7 +181,7 @@
let a = a.slice_move(s![..0, 1..1]);
assert!(a.is_empty());
let mut b = Array::uninit(a.dim());
a.move_into(b.view_mut());
a.move_into_uninit(b.view_mut());
let b = unsafe { b.assume_init() };

let total = m * n;
@@ -194,6 +194,35 @@
}
}

#[test]
fn move_into() {
// Test various memory layouts and holes while moving String elements with move_into
for &use_f_order in &[false, true] {
for &invert_axis in &[0b00, 0b01, 0b10, 0b11] { // bitmask for axis to invert
for &slice in &[false, true] {
let mut a = Array::from_shape_fn((5, 4).set_f(use_f_order),
|idx| format!("{:?}", idx));
if slice {
a.slice_collapse(s![1..-1, ..;2]);
}

if invert_axis & 0b01 != 0 {
a.invert_axis(Axis(0));
}
if invert_axis & 0b10 != 0 {
a.invert_axis(Axis(1));
}

let acopy = a.clone();
let mut b = Array::default(a.dim().set_f(!use_f_order ^ !slice));
a.move_into(&mut b);

assert_eq!(acopy, b);
}
}
}
}


/// This counter can create elements, and then count and verify
/// the number of which have actually been dropped again.
