Polymorphize array::IntoIter's iterator impl
@@ -1,38 +1,35 @@
 //! Defines the `IntoIter` owned iterator for arrays.
 
 use crate::intrinsics::transmute_unchecked;
-use crate::iter::{self, FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce};
+use crate::iter::{FusedIterator, TrustedLen, TrustedRandomAccessNoCoerce};
 use crate::mem::MaybeUninit;
 use crate::num::NonZero;
-use crate::ops::{IndexRange, Range};
+use crate::ops::{IndexRange, Range, Try};
 use crate::{fmt, ptr};
 
+mod iter_inner;
+
+type InnerSized<T, const N: usize> = iter_inner::PolymorphicIter<[MaybeUninit<T>; N]>;
+type InnerUnsized<T> = iter_inner::PolymorphicIter<[MaybeUninit<T>]>;
+
 /// A by-value [array] iterator.
 #[stable(feature = "array_value_iter", since = "1.51.0")]
 #[rustc_insignificant_dtor]
 #[rustc_diagnostic_item = "ArrayIntoIter"]
+#[derive(Clone)]
 pub struct IntoIter<T, const N: usize> {
-    /// This is the array we are iterating over.
-    ///
-    /// Elements with index `i` where `alive.start <= i < alive.end` have not
-    /// been yielded yet and are valid array entries. Elements with indices `i
-    /// < alive.start` or `i >= alive.end` have been yielded already and must
-    /// not be accessed anymore! Those dead elements might even be in a
-    /// completely uninitialized state!
-    ///
-    /// So the invariants are:
-    /// - `data[alive]` is alive (i.e. contains valid elements)
-    /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
-    ///   elements were already read and must not be touched anymore!)
-    data: [MaybeUninit<T>; N],
+    inner: InnerSized<T, N>,
+}
 
-    /// The elements in `data` that have not been yielded yet.
-    ///
-    /// Invariants:
-    /// - `alive.end <= N`
-    ///
-    /// (And the `IndexRange` type requires `alive.start <= alive.end`.)
-    alive: IndexRange,
+impl<T, const N: usize> IntoIter<T, N> {
+    #[inline]
+    fn unsize(&self) -> &InnerUnsized<T> {
+        &self.inner
+    }
+    #[inline]
+    fn unsize_mut(&mut self) -> &mut InnerUnsized<T> {
+        &mut self.inner
+    }
 }
 
 // Note: the `#[rustc_skip_during_method_dispatch(array)]` on `trait IntoIterator`
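The struct rewrite above is the core of the change: `IntoIter<T, N>` now wraps a `PolymorphicIter<[MaybeUninit<T>; N]>`, and `unsize`/`unsize_mut` hand out references to `PolymorphicIter<[MaybeUninit<T>]>` through Rust's built-in unsized coercion on a struct's last field. Everything that does not need to know `N` can then live on the unsized form and be instantiated once per element type instead of once per `(T, N)` pair. A minimal, self-contained sketch of that pattern (the names `Poly` and `Wrapper` are illustrative, not taken from the library):

struct Poly<Tail: ?Sized> {
    yielded: usize,
    tail: Tail, // must be the last field for the unsized coercion to apply
}

impl<T> Poly<[T]> {
    // The interesting logic lives here, independent of the array length N.
    fn remaining(&self) -> usize {
        self.tail.len() - self.yielded
    }
}

struct Wrapper<T, const N: usize> {
    inner: Poly<[T; N]>,
}

impl<T, const N: usize> Wrapper<T, N> {
    // The only per-N code is this coercion, mirroring `IntoIter::unsize` above:
    // `&Poly<[T; N]>` is coerced to `&Poly<[T]>` at the return site.
    fn unsize(&self) -> &Poly<[T]> {
        &self.inner
    }
}

fn main() {
    let w = Wrapper { inner: Poly { yielded: 1, tail: [10, 20, 30] } };
    assert_eq!(w.unsize().remaining(), 2);
}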
@@ -53,6 +50,7 @@ impl<T, const N: usize> IntoIterator for [T; N] {
     /// 2021 edition -- see the [array] Editions section for more information.
     ///
     /// [array]: prim@array
+    #[inline]
     fn into_iter(self) -> Self::IntoIter {
         // SAFETY: The transmute here is actually safe. The docs of `MaybeUninit`
         // promise:
@@ -68,7 +66,10 @@ impl<T, const N: usize> IntoIterator for [T; N] {
         // FIXME: If normal `transmute` ever gets smart enough to allow this
         // directly, use it instead of `transmute_unchecked`.
         let data: [MaybeUninit<T>; N] = unsafe { transmute_unchecked(self) };
-        IntoIter { data, alive: IndexRange::zero_to(N) }
+        // SAFETY: The original array was entirely initialized and the alive
+        // range we're passing here represents that fact.
+        let inner = unsafe { InnerSized::new_unchecked(IndexRange::zero_to(N), data) };
+        IntoIter { inner }
     }
 }
 
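For reference, the stable behavior this constructor backs is unchanged: on edition 2021 an array iterates by value, which is exactly what the fully-alive range `IndexRange::zero_to(N)` encodes. A small usage example (ordinary user code, not part of the commit):

fn main() {
    // Each `String` is moved out of the array; the iterator starts with the
    // whole buffer alive, as set up by `into_iter` above.
    let names = [String::from("alpha"), String::from("beta")];
    let lengths: Vec<usize> = names.into_iter().map(|s| s.len()).collect();
    assert_eq!(lengths, vec![5, 4]);
}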
@@ -136,13 +137,16 @@ impl<T, const N: usize> IntoIter<T, N> {
     /// assert_eq!(r.collect::<Vec<_>>(), vec![10, 11, 12, 13, 14, 15]);
     /// ```
     #[unstable(feature = "array_into_iter_constructors", issue = "91583")]
+    #[inline]
     pub const unsafe fn new_unchecked(
         buffer: [MaybeUninit<T>; N],
         initialized: Range<usize>,
     ) -> Self {
         // SAFETY: one of our safety conditions is that the range is canonical.
         let alive = unsafe { IndexRange::new_unchecked(initialized.start, initialized.end) };
-        Self { data: buffer, alive }
+        // SAFETY: one of our safety conditions is that these items are initialized.
+        let inner = unsafe { InnerSized::new_unchecked(alive, buffer) };
+        IntoIter { inner }
     }
 
     /// Creates an iterator over `T` which returns no elements.
@@ -198,172 +202,134 @@ impl<T, const N: usize> IntoIter<T, N> {
     /// assert_eq!(get_bytes(false).collect::<Vec<_>>(), vec![]);
     /// ```
     #[unstable(feature = "array_into_iter_constructors", issue = "91583")]
+    #[inline]
     pub const fn empty() -> Self {
-        let buffer = [const { MaybeUninit::uninit() }; N];
-        let initialized = 0..0;
-
-        // SAFETY: We're telling it that none of the elements are initialized,
-        // which is trivially true. And ∀N: usize, 0 <= N.
-        unsafe { Self::new_unchecked(buffer, initialized) }
+        let inner = InnerSized::empty();
+        IntoIter { inner }
     }
 
     /// Returns an immutable slice of all elements that have not been yielded
     /// yet.
     #[stable(feature = "array_value_iter", since = "1.51.0")]
+    #[inline]
     pub fn as_slice(&self) -> &[T] {
-        // SAFETY: We know that all elements within `alive` are properly initialized.
-        unsafe {
-            let slice = self.data.get_unchecked(self.alive.clone());
-            slice.assume_init_ref()
-        }
+        self.unsize().as_slice()
     }
 
     /// Returns a mutable slice of all elements that have not been yielded yet.
     #[stable(feature = "array_value_iter", since = "1.51.0")]
+    #[inline]
     pub fn as_mut_slice(&mut self) -> &mut [T] {
-        // SAFETY: We know that all elements within `alive` are properly initialized.
-        unsafe {
-            let slice = self.data.get_unchecked_mut(self.alive.clone());
-            slice.assume_init_mut()
-        }
+        self.unsize_mut().as_mut_slice()
    }
 }
 
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T, const N: usize> Iterator for IntoIter<T, N> {
     type Item = T;
-    fn next(&mut self) -> Option<Self::Item> {
-        // Get the next index from the front.
-        //
-        // Increasing `alive.start` by 1 maintains the invariant regarding
-        // `alive`. However, due to this change, for a short time, the alive
-        // zone is not `data[alive]` anymore, but `data[idx..alive.end]`.
-        self.alive.next().map(|idx| {
-            // Read the element from the array.
-            // SAFETY: `idx` is an index into the former "alive" region of the
-            // array. Reading this element means that `data[idx]` is regarded as
-            // dead now (i.e. do not touch). As `idx` was the start of the
-            // alive-zone, the alive zone is now `data[alive]` again, restoring
-            // all invariants.
-            unsafe { self.data.get_unchecked(idx).assume_init_read() }
-        })
-    }
 
-    fn size_hint(&self) -> (usize, Option<usize>) {
-        let len = self.len();
-        (len, Some(len))
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        self.unsize_mut().next()
     }
 
     #[inline]
-    fn fold<Acc, Fold>(mut self, init: Acc, mut fold: Fold) -> Acc
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        self.unsize().size_hint()
+    }
+
+    #[inline]
+    fn fold<Acc, Fold>(mut self, init: Acc, fold: Fold) -> Acc
     where
         Fold: FnMut(Acc, Self::Item) -> Acc,
     {
-        let data = &mut self.data;
-        iter::ByRefSized(&mut self.alive).fold(init, |acc, idx| {
-            // SAFETY: idx is obtained by folding over the `alive` range, which implies the
-            // value is currently considered alive but as the range is being consumed each value
-            // we read here will only be read once and then considered dead.
-            fold(acc, unsafe { data.get_unchecked(idx).assume_init_read() })
-        })
+        self.unsize_mut().fold(init, fold)
     }
 
+    #[inline]
+    fn try_fold<B, F, R>(&mut self, init: B, f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Output = B>,
+    {
+        self.unsize_mut().try_fold(init, f)
+    }
+
+    #[inline]
     fn count(self) -> usize {
         self.len()
     }
 
+    #[inline]
     fn last(mut self) -> Option<Self::Item> {
         self.next_back()
     }
 
+    #[inline]
     fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
-        // This also moves the start, which marks them as conceptually "dropped",
-        // so if anything goes bad then our drop impl won't double-free them.
-        let range_to_drop = self.alive.take_prefix(n);
-        let remaining = n - range_to_drop.len();
-
-        // SAFETY: These elements are currently initialized, so it's fine to drop them.
-        unsafe {
-            let slice = self.data.get_unchecked_mut(range_to_drop);
-            slice.assume_init_drop();
-        }
-
-        NonZero::new(remaining).map_or(Ok(()), Err)
+        self.unsize_mut().advance_by(n)
     }
 
     #[inline]
     unsafe fn __iterator_get_unchecked(&mut self, idx: usize) -> Self::Item {
         // SAFETY: The caller must provide an idx that is in bound of the remainder.
-        unsafe { self.data.as_ptr().add(self.alive.start()).add(idx).cast::<T>().read() }
+        let elem_ref = unsafe { self.as_mut_slice().get_unchecked_mut(idx) };
+        // SAFETY: We only implement `TrustedRandomAccessNoCoerce` for types
+        // which are actually `Copy`, so cannot have multiple-drop issues.
+        unsafe { ptr::read(elem_ref) }
     }
 }
 
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T, const N: usize> DoubleEndedIterator for IntoIter<T, N> {
+    #[inline]
     fn next_back(&mut self) -> Option<Self::Item> {
-        // Get the next index from the back.
-        //
-        // Decreasing `alive.end` by 1 maintains the invariant regarding
-        // `alive`. However, due to this change, for a short time, the alive
-        // zone is not `data[alive]` anymore, but `data[alive.start..=idx]`.
-        self.alive.next_back().map(|idx| {
-            // Read the element from the array.
-            // SAFETY: `idx` is an index into the former "alive" region of the
-            // array. Reading this element means that `data[idx]` is regarded as
-            // dead now (i.e. do not touch). As `idx` was the end of the
-            // alive-zone, the alive zone is now `data[alive]` again, restoring
-            // all invariants.
-            unsafe { self.data.get_unchecked(idx).assume_init_read() }
-        })
+        self.unsize_mut().next_back()
     }
 
     #[inline]
-    fn rfold<Acc, Fold>(mut self, init: Acc, mut rfold: Fold) -> Acc
+    fn rfold<Acc, Fold>(mut self, init: Acc, rfold: Fold) -> Acc
     where
         Fold: FnMut(Acc, Self::Item) -> Acc,
     {
-        let data = &mut self.data;
-        iter::ByRefSized(&mut self.alive).rfold(init, |acc, idx| {
-            // SAFETY: idx is obtained by folding over the `alive` range, which implies the
-            // value is currently considered alive but as the range is being consumed each value
-            // we read here will only be read once and then considered dead.
-            rfold(acc, unsafe { data.get_unchecked(idx).assume_init_read() })
-        })
+        self.unsize_mut().rfold(init, rfold)
     }
 
+    #[inline]
+    fn try_rfold<B, F, R>(&mut self, init: B, f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Output = B>,
+    {
+        self.unsize_mut().try_rfold(init, f)
+    }
+
+    #[inline]
     fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
-        // This also moves the end, which marks them as conceptually "dropped",
-        // so if anything goes bad then our drop impl won't double-free them.
-        let range_to_drop = self.alive.take_suffix(n);
-        let remaining = n - range_to_drop.len();
-
-        // SAFETY: These elements are currently initialized, so it's fine to drop them.
-        unsafe {
-            let slice = self.data.get_unchecked_mut(range_to_drop);
-            slice.assume_init_drop();
-        }
-
-        NonZero::new(remaining).map_or(Ok(()), Err)
+        self.unsize_mut().advance_back_by(n)
     }
 }
 
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T, const N: usize> Drop for IntoIter<T, N> {
+    #[inline]
     fn drop(&mut self) {
-        // SAFETY: This is safe: `as_mut_slice` returns exactly the sub-slice
-        // of elements that have not been moved out yet and that remain
-        // to be dropped.
-        unsafe { ptr::drop_in_place(self.as_mut_slice()) }
+        // `inner` now handles this, but it'd technically be a breaking change
+        // to remove this `impl`, even though it's useless.
    }
 }
 
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T, const N: usize> ExactSizeIterator for IntoIter<T, N> {
+    #[inline]
     fn len(&self) -> usize {
-        self.alive.len()
+        self.inner.len()
     }
+    #[inline]
     fn is_empty(&self) -> bool {
-        self.alive.is_empty()
+        self.inner.len() == 0
     }
 }
 
@@ -396,32 +362,9 @@ where
     const MAY_HAVE_SIDE_EFFECT: bool = false;
 }
 
-#[stable(feature = "array_value_iter_impls", since = "1.40.0")]
-impl<T: Clone, const N: usize> Clone for IntoIter<T, N> {
-    fn clone(&self) -> Self {
-        // Note, we don't really need to match the exact same alive range, so
-        // we can just clone into offset 0 regardless of where `self` is.
-        let mut new =
-            Self { data: [const { MaybeUninit::uninit() }; N], alive: IndexRange::zero_to(0) };
-
-        // Clone all alive elements.
-        for (src, dst) in iter::zip(self.as_slice(), &mut new.data) {
-            // Write a clone into the new array, then update its alive range.
-            // If cloning panics, we'll correctly drop the previous items.
-            dst.write(src.clone());
-            // This addition cannot overflow as we're iterating a slice
-            new.alive = IndexRange::zero_to(new.alive.end() + 1);
-        }
-
-        new
-    }
-}
-
 #[stable(feature = "array_value_iter_impls", since = "1.40.0")]
 impl<T: fmt::Debug, const N: usize> fmt::Debug for IntoIter<T, N> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        // Only print the elements that were not yielded yet: we cannot
-        // access the yielded elements anymore.
-        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
+        self.unsize().fmt(f)
     }
 }
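Every impl above now forwards to the shared inner iterator, so the observable behavior of `array::IntoIter` must stay the same. A small self-contained check of the public surface touched by this file, written against the stable API (a test sketch, not part of the commit):

fn main() {
    let mut it = [1, 2, 3, 4].into_iter();
    assert_eq!(it.len(), 4);             // ExactSizeIterator::len
    assert_eq!(it.next(), Some(1));      // Iterator::next
    assert_eq!(it.next_back(), Some(4)); // DoubleEndedIterator::next_back
    assert_eq!(it.as_slice(), &[2, 3]);  // the not-yet-yielded elements

    let cloned: Vec<i32> = it.clone().collect(); // Clone is now derived
    assert_eq!(cloned, vec![2, 3]);
    assert_eq!(it.fold(0, |acc, x| acc + x), 5); // fold forwards to the inner iterator
}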

library/core/src/array/iter/iter_inner.rs (new file, 281 lines)
@@ -0,0 +1,281 @@
+//! Defines the `IntoIter` owned iterator for arrays.
+
+use crate::mem::MaybeUninit;
+use crate::num::NonZero;
+use crate::ops::{IndexRange, NeverShortCircuit, Try};
+use crate::{fmt, iter};
+
+#[allow(private_bounds)]
+trait PartialDrop {
+    /// # Safety
+    /// `self[alive]` are all initialized before the call,
+    /// then are never used (without reinitializing them) after it.
+    unsafe fn partial_drop(&mut self, alive: IndexRange);
+}
+impl<T> PartialDrop for [MaybeUninit<T>] {
+    unsafe fn partial_drop(&mut self, alive: IndexRange) {
+        // SAFETY: We know that all elements within `alive` are properly initialized.
+        unsafe { self.get_unchecked_mut(alive).assume_init_drop() }
+    }
+}
+impl<T, const N: usize> PartialDrop for [MaybeUninit<T>; N] {
+    unsafe fn partial_drop(&mut self, alive: IndexRange) {
+        let slice: &mut [MaybeUninit<T>] = self;
+        // SAFETY: Initialized elements in the array are also initialized in the slice.
+        unsafe { slice.partial_drop(alive) }
+    }
+}
+
+/// The internals of a by-value array iterator.
+///
+/// The real `array::IntoIter<T, N>` stores a `PolymorphicIter<[MaybeUninit<T>; N]>`
+/// which it unsizes to `PolymorphicIter<[MaybeUninit<T>]>` to iterate.
+#[allow(private_bounds)]
+pub(super) struct PolymorphicIter<TAIL: ?Sized>
+where
+    TAIL: PartialDrop,
+{
+    /// The elements in `data` that have not been yielded yet.
+    ///
+    /// Invariants:
+    /// - `alive.end <= N`
+    ///
+    /// (And the `IndexRange` type requires `alive.start <= alive.end`.)
+    alive: IndexRange,
+
+    /// This is the array we are iterating over.
+    ///
+    /// Elements with index `i` where `alive.start <= i < alive.end` have not
+    /// been yielded yet and are valid array entries. Elements with indices `i
+    /// < alive.start` or `i >= alive.end` have been yielded already and must
+    /// not be accessed anymore! Those dead elements might even be in a
+    /// completely uninitialized state!
+    ///
+    /// So the invariants are:
+    /// - `data[alive]` is alive (i.e. contains valid elements)
+    /// - `data[..alive.start]` and `data[alive.end..]` are dead (i.e. the
+    ///   elements were already read and must not be touched anymore!)
+    data: TAIL,
+}
+
+#[allow(private_bounds)]
+impl<TAIL: ?Sized> PolymorphicIter<TAIL>
+where
+    TAIL: PartialDrop,
+{
+    #[inline]
+    pub(super) const fn len(&self) -> usize {
+        self.alive.len()
+    }
+}
+
+#[allow(private_bounds)]
+impl<TAIL: ?Sized> Drop for PolymorphicIter<TAIL>
+where
+    TAIL: PartialDrop,
+{
+    #[inline]
+    fn drop(&mut self) {
+        // SAFETY: by our type invariant `self.alive` is exactly the initialized
+        // items, and this is drop so nothing can use the items afterwards.
+        unsafe { self.data.partial_drop(self.alive.clone()) }
+    }
+}
+
+impl<T, const N: usize> PolymorphicIter<[MaybeUninit<T>; N]> {
+    #[inline]
+    pub(super) const fn empty() -> Self {
+        Self { alive: IndexRange::zero_to(0), data: [const { MaybeUninit::uninit() }; N] }
+    }
+
+    /// # Safety
+    /// `data[alive]` are all initialized.
+    #[inline]
+    pub(super) const unsafe fn new_unchecked(alive: IndexRange, data: [MaybeUninit<T>; N]) -> Self {
+        Self { alive, data }
+    }
+}
+
+impl<T: Clone, const N: usize> Clone for PolymorphicIter<[MaybeUninit<T>; N]> {
+    #[inline]
+    fn clone(&self) -> Self {
+        // Note, we don't really need to match the exact same alive range, so
+        // we can just clone into offset 0 regardless of where `self` is.
+        let mut new = Self::empty();
+
+        fn clone_into_new<U: Clone>(
+            source: &PolymorphicIter<[MaybeUninit<U>]>,
+            target: &mut PolymorphicIter<[MaybeUninit<U>]>,
+        ) {
+            // Clone all alive elements.
+            for (src, dst) in iter::zip(source.as_slice(), &mut target.data) {
+                // Write a clone into the new array, then update its alive range.
+                // If cloning panics, we'll correctly drop the previous items.
+                dst.write(src.clone());
+                // This addition cannot overflow as we're iterating a slice,
+                // the length of which always fits in usize.
+                target.alive = IndexRange::zero_to(target.alive.end() + 1);
+            }
+        }
+
+        clone_into_new(self, &mut new);
+        new
+    }
+}
+
+impl<T> PolymorphicIter<[MaybeUninit<T>]> {
+    #[inline]
+    pub(super) fn as_slice(&self) -> &[T] {
+        // SAFETY: We know that all elements within `alive` are properly initialized.
+        unsafe {
+            let slice = self.data.get_unchecked(self.alive.clone());
+            slice.assume_init_ref()
+        }
+    }
+
+    #[inline]
+    pub(super) fn as_mut_slice(&mut self) -> &mut [T] {
+        // SAFETY: We know that all elements within `alive` are properly initialized.
+        unsafe {
+            let slice = self.data.get_unchecked_mut(self.alive.clone());
+            slice.assume_init_mut()
+        }
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for PolymorphicIter<[MaybeUninit<T>]> {
+    #[inline]
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        // Only print the elements that were not yielded yet: we cannot
+        // access the yielded elements anymore.
+        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
+    }
+}
+
+/// Iterator-equivalent methods.
+///
+/// We don't implement the actual iterator traits because we want to implement
+/// things like `try_fold` that require `Self: Sized` (which we're not).
+impl<T> PolymorphicIter<[MaybeUninit<T>]> {
+    #[inline]
+    pub(super) fn next(&mut self) -> Option<T> {
+        // Get the next index from the front.
+        //
+        // Increasing `alive.start` by 1 maintains the invariant regarding
+        // `alive`. However, due to this change, for a short time, the alive
+        // zone is not `data[alive]` anymore, but `data[idx..alive.end]`.
+        self.alive.next().map(|idx| {
+            // Read the element from the array.
+            // SAFETY: `idx` is an index into the former "alive" region of the
+            // array. Reading this element means that `data[idx]` is regarded as
+            // dead now (i.e. do not touch). As `idx` was the start of the
+            // alive-zone, the alive zone is now `data[alive]` again, restoring
+            // all invariants.
+            unsafe { self.data.get_unchecked(idx).assume_init_read() }
+        })
+    }
+
+    #[inline]
+    pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
+        let len = self.len();
+        (len, Some(len))
+    }
+
+    #[inline]
+    pub(super) fn advance_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
+        // This also moves the start, which marks them as conceptually "dropped",
+        // so if anything goes bad then our drop impl won't double-free them.
+        let range_to_drop = self.alive.take_prefix(n);
+        let remaining = n - range_to_drop.len();
+
+        // SAFETY: These elements are currently initialized, so it's fine to drop them.
+        unsafe {
+            let slice = self.data.get_unchecked_mut(range_to_drop);
+            slice.assume_init_drop();
+        }
+
+        NonZero::new(remaining).map_or(Ok(()), Err)
+    }
+
+    #[inline]
+    pub(super) fn fold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
+        self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0
+    }
+
+    #[inline]
+    pub(super) fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
+    where
+        F: FnMut(B, T) -> R,
+        R: Try<Output = B>,
+    {
+        // `alive` is an `IndexRange`, not an arbitrary iterator, so we can
+        // trust that its `try_fold` isn't going to do something weird like
+        // call the fold-er multiple times for the same index.
+        let data = &mut self.data;
+        self.alive.try_fold(init, move |accum, idx| {
+            // SAFETY: `idx` has been removed from the alive range, so we're not
+            // going to drop it (even if `f` panics) and thus it's ok to give
+            // out ownership of that item to `f` to handle.
+            let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
+            f(accum, elem)
+        })
+    }
+
+    #[inline]
+    pub(super) fn next_back(&mut self) -> Option<T> {
+        // Get the next index from the back.
+        //
+        // Decreasing `alive.end` by 1 maintains the invariant regarding
+        // `alive`. However, due to this change, for a short time, the alive
+        // zone is not `data[alive]` anymore, but `data[alive.start..=idx]`.
+        self.alive.next_back().map(|idx| {
+            // Read the element from the array.
+            // SAFETY: `idx` is an index into the former "alive" region of the
+            // array. Reading this element means that `data[idx]` is regarded as
+            // dead now (i.e. do not touch). As `idx` was the end of the
+            // alive-zone, the alive zone is now `data[alive]` again, restoring
+            // all invariants.
+            unsafe { self.data.get_unchecked(idx).assume_init_read() }
+        })
+    }
+
+    #[inline]
+    pub(super) fn advance_back_by(&mut self, n: usize) -> Result<(), NonZero<usize>> {
+        // This also moves the end, which marks them as conceptually "dropped",
+        // so if anything goes bad then our drop impl won't double-free them.
+        let range_to_drop = self.alive.take_suffix(n);
+        let remaining = n - range_to_drop.len();
+
+        // SAFETY: These elements are currently initialized, so it's fine to drop them.
+        unsafe {
+            let slice = self.data.get_unchecked_mut(range_to_drop);
+            slice.assume_init_drop();
+        }
+
+        NonZero::new(remaining).map_or(Ok(()), Err)
+    }
+
+    #[inline]
+    pub(super) fn rfold<B>(&mut self, init: B, f: impl FnMut(B, T) -> B) -> B {
+        self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0
+    }
+
+    #[inline]
+    pub(super) fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R
+    where
+        F: FnMut(B, T) -> R,
+        R: Try<Output = B>,
+    {
+        // `alive` is an `IndexRange`, not an arbitrary iterator, so we can
+        // trust that its `try_rfold` isn't going to do something weird like
+        // call the fold-er multiple times for the same index.
+        let data = &mut self.data;
+        self.alive.try_rfold(init, move |accum, idx| {
+            // SAFETY: `idx` has been removed from the alive range, so we're not
+            // going to drop it (even if `f` panics) and thus it's ok to give
+            // out ownership of that item to `f` to handle.
+            let elem = unsafe { data.get_unchecked(idx).assume_init_read() };
+            f(accum, elem)
+        })
+    }
+}
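In the file above, `fold` and `rfold` are routed through `try_fold`/`try_rfold` using `NeverShortCircuit`, a core-internal `Try` adapter that can never break, so the fallible and infallible folds share one loop. A stable-Rust sketch of the same trick, using `ControlFlow` with an uninhabited break type in place of `NeverShortCircuit` (the helper name here is made up for illustration):

use std::convert::Infallible;
use std::ops::ControlFlow;

// Implement an infallible fold on top of `try_fold` by wrapping the closure in a
// `Try` type whose break case is uninhabited, so nothing can short-circuit.
fn fold_via_try_fold<I, B>(iter: &mut I, init: B, mut f: impl FnMut(B, I::Item) -> B) -> B
where
    I: Iterator,
{
    let flow = iter.try_fold(init, |acc, x| ControlFlow::<Infallible, B>::Continue(f(acc, x)));
    match flow {
        ControlFlow::Continue(acc) => acc,
        // `Infallible` has no values, so this arm is unreachable by construction.
        ControlFlow::Break(never) => match never {},
    }
}

fn main() {
    let mut it = [1, 2, 3].into_iter();
    assert_eq!(fold_via_try_fold(&mut it, 0, |acc, x| acc + x), 6);
}

Because the break type is uninhabited, the optimizer sees a plain loop, which is the point of funneling `PolymorphicIter::fold` through `try_fold`.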
@@ -1,5 +1,6 @@
 use crate::iter::{FusedIterator, TrustedLen};
 use crate::num::NonZero;
+use crate::ops::{NeverShortCircuit, Try};
 use crate::ub_checks;
 
 /// Like a `Range<usize>`, but with a safety invariant that `start <= end`.
@@ -112,6 +113,12 @@ impl IndexRange {
         self.end = mid;
         suffix
     }
+
+    #[inline]
+    fn assume_range(&self) {
+        // SAFETY: This is the type invariant
+        unsafe { crate::hint::assert_unchecked(self.start <= self.end) }
+    }
 }
 
 impl Iterator for IndexRange {
@@ -138,6 +145,30 @@ impl Iterator for IndexRange {
         let taken = self.take_prefix(n);
         NonZero::new(n - taken.len()).map_or(Ok(()), Err)
     }
+
+    #[inline]
+    fn fold<B, F: FnMut(B, usize) -> B>(mut self, init: B, f: F) -> B {
+        self.try_fold(init, NeverShortCircuit::wrap_mut_2(f)).0
+    }
+
+    #[inline]
+    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Output = B>,
+    {
+        // `Range` needs to check `start < end`, but thanks to our type invariant
+        // we can loop on the stricter `start != end`.
+
+        self.assume_range();
+        while self.start != self.end {
+            // SAFETY: We just checked that the range is non-empty
+            let i = unsafe { self.next_unchecked() };
+            accum = f(accum, i)?;
+        }
+        try { accum }
+    }
 }
 
 impl DoubleEndedIterator for IndexRange {
@@ -156,6 +187,30 @@ impl DoubleEndedIterator for IndexRange {
         let taken = self.take_suffix(n);
         NonZero::new(n - taken.len()).map_or(Ok(()), Err)
     }
+
+    #[inline]
+    fn rfold<B, F: FnMut(B, usize) -> B>(mut self, init: B, f: F) -> B {
+        self.try_rfold(init, NeverShortCircuit::wrap_mut_2(f)).0
+    }
+
+    #[inline]
+    fn try_rfold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: Try<Output = B>,
+    {
+        // `Range` needs to check `start < end`, but thanks to our type invariant
+        // we can loop on the stricter `start != end`.
+
+        self.assume_range();
+        while self.start != self.end {
+            // SAFETY: We just checked that the range is non-empty
+            let i = unsafe { self.next_back_unchecked() };
+            accum = f(accum, i)?;
+        }
+        try { accum }
+    }
 }
 
 impl ExactSizeIterator for IndexRange {
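The `assume_range` helper added above feeds the `start <= end` type invariant to the optimizer, which is what lets the new `try_fold`/`try_rfold` loop on the stricter `start != end` without risking a wrapped, never-terminating range. A standalone sketch of the same idea using the stable `std::hint::assert_unchecked` (the function here is illustrative, not the library code):

fn sum_range(mut start: usize, end: usize) -> usize {
    // Establish the invariant dynamically for this sketch; in `IndexRange` it is
    // a type invariant, so only the hint is needed there.
    assert!(start <= end);
    // SAFETY: just checked above.
    unsafe { std::hint::assert_unchecked(start <= end) };
    let mut acc = 0;
    while start != end {
        acc += start;
        start += 1;
    }
    acc
}

fn main() {
    assert_eq!(sum_range(2, 6), 2 + 3 + 4 + 5);
}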
@@ -11,7 +11,18 @@
 #[no_mangle]
 pub fn test() -> usize {
     // CHECK-LABEL: @test(
-    // CHECK: ret {{i64|i32}} 165
+    // host: ret {{i64|i32}} 165
+
+    // FIXME: Now that this autovectorizes via a masked load, it doesn't actually
+    // const-fold for certain widths. The `test_eight` case below shows that, yes,
+    // what we're emitting *can* be const-folded, except that the way LLVM does it
+    // for certain widths doesn't today. We should be able to put this back to
+    // the same check after <https://github.com/llvm/llvm-project/issues/134513>
+    // x86-64-v3: <i64 23, i64 16, i64 54, i64 3>
+    // x86-64-v3: llvm.masked.load
+    // x86-64-v3: %[[R:.+]] = {{.+}}llvm.vector.reduce.add.v4i64
+    // x86-64-v3: ret i64 %[[R]]
+
     let values = [23, 16, 54, 3, 60, 9];
     let mut acc = 0;
     for item in values {
@@ -19,3 +30,15 @@ pub fn test() -> usize {
     }
     acc
 }
+
+#[no_mangle]
+pub fn test_eight() -> usize {
+    // CHECK-LABEL: @test_eight(
+    // CHECK: ret {{i64|i32}} 220
+    let values = [23, 16, 54, 3, 60, 9, 13, 42];
+    let mut acc = 0;
+    for item in values {
+        acc += item;
+    }
+    acc
+}