Merge pull request #504 from GnomedDev/vec-generic-length

Implement a generic length parameter for Vec<T, N>
Commit fb62d12ad5 by Markus Reiter, 2025-04-10 17:45:39 +00:00 (committed by GitHub)
13 changed files with 410 additions and 214 deletions
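For orientation, a rough sketch of what the new `LenT` parameter looks like in use. The snippet is illustrative rather than taken from the PR, and the exact memory saving depends on the target and on padding:

```rust
use heapless::Vec;

// `LenT` defaults to the smallest mapped unsigned integer type that can hold `N`,
// so `Vec<u8, 10>` keeps compiling and now stores its length in a `u8` rather than a `usize`.
let mut small: Vec<u8, 10> = Vec::new();
small.push(42).unwrap();

// The length type can still be spelled out explicitly, e.g. to keep the old `usize` layout.
let mut explicit: Vec<u8, 10, usize> = Vec::new();
explicit.extend_from_slice(&[1, 2, 3]).unwrap();

// Comparing the two layouts shows where the memory saving comes from.
assert!(core::mem::size_of::<Vec<u8, 10>>() <= core::mem::size_of::<Vec<u8, 10, usize>>());
```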


@@ -48,6 +48,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 - Added `truncate` to `IndexMap`.
 - Added `get_index` and `get_index_mut` to `IndexMap`.
 - Added `String::uDisplay`.
+- Added `LenT` generic to `Vec<T, N>` and `VecView<T>` to save memory when using a sane capacity value.
 ### Changed


@@ -57,7 +57,7 @@ impl private::Sealed for Min {}
 /// struct if you want to write code that's generic over both.
 pub struct BinaryHeapInner<T, K, S: VecStorage<T> + ?Sized> {
     pub(crate) _kind: PhantomData<K>,
-    pub(crate) data: VecInner<T, S>,
+    pub(crate) data: VecInner<T, usize, S>,
 }
 /// A priority queue implemented with a binary heap.
@@ -181,7 +181,7 @@ impl<T, K, const N: usize> BinaryHeap<T, K, N> {
 impl<T, K, const N: usize> BinaryHeap<T, K, N> {
     /// Returns the underlying `Vec<T,N>`. Order is arbitrary and time is *O*(1).
-    pub fn into_vec(self) -> Vec<T, N> {
+    pub fn into_vec(self) -> Vec<T, N, usize> {
         self.data
     }
 }


@@ -1,6 +1,6 @@
 use crate::{
-    binary_heap::Kind as BinaryHeapKind, BinaryHeap, Deque, HistoryBuffer, IndexMap, IndexSet,
-    LinearMap, String, Vec,
+    binary_heap::Kind as BinaryHeapKind, len_type::LenType, BinaryHeap, Deque, HistoryBuffer,
+    IndexMap, IndexSet, LinearMap, String, Vec,
 };
 use core::{
     fmt,
@@ -95,7 +95,7 @@ where
     }
 }
-impl<'de, T, const N: usize> Deserialize<'de> for Vec<T, N>
+impl<'de, T, LenT: LenType, const N: usize> Deserialize<'de> for Vec<T, N, LenT>
 where
     T: Deserialize<'de>,
 {
@@ -103,13 +103,14 @@ where
     where
         D: Deserializer<'de>,
     {
-        struct ValueVisitor<'de, T, const N: usize>(PhantomData<(&'de (), T)>);
+        struct ValueVisitor<'de, T, LenT: LenType, const N: usize>(PhantomData<(&'de (), T, LenT)>);
-        impl<'de, T, const N: usize> serde::de::Visitor<'de> for ValueVisitor<'de, T, N>
+        impl<'de, T, LenT, const N: usize> serde::de::Visitor<'de> for ValueVisitor<'de, T, LenT, N>
         where
             T: Deserialize<'de>,
+            LenT: LenType,
         {
-            type Value = Vec<T, N>;
+            type Value = Vec<T, N, LenT>;
             fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
                 formatter.write_str("a sequence")


@@ -1,12 +1,13 @@
 //! Defmt implementations for heapless types
 use crate::{
+    len_type::LenType,
     string::{StringInner, StringStorage},
     vec::{VecInner, VecStorage},
 };
 use defmt::Formatter;
-impl<T, S: VecStorage<T> + ?Sized> defmt::Format for VecInner<T, S>
+impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> defmt::Format for VecInner<T, LenT, S>
 where
     T: defmt::Format,
 {


@@ -138,7 +138,7 @@ macro_rules! probe_loop {
 }
 struct CoreMap<K, V, const N: usize> {
-    entries: Vec<Bucket<K, V>, N>,
+    entries: Vec<Bucket<K, V>, N, usize>,
     indices: [Option<Pos>; N],
 }
@@ -1417,7 +1417,7 @@
 #[derive(Clone)]
 pub struct IntoIter<K, V, const N: usize> {
-    entries: Vec<Bucket<K, V>, N>,
+    entries: Vec<Bucket<K, V>, N, usize>,
 }
 impl<K, V, const N: usize> Iterator for IntoIter<K, V, N> {

src/len_type.rs (new file, 107 lines)

@@ -0,0 +1,107 @@
use core::{
fmt::{Debug, Display},
ops::{Add, AddAssign, Sub, SubAssign},
};
pub trait Sealed:
Send
+ Sync
+ Copy
+ Display
+ Debug
+ PartialEq
+ Add<Output = Self>
+ AddAssign
+ Sub<Output = Self>
+ SubAssign
+ PartialOrd
+ TryFrom<usize, Error: Debug>
+ TryInto<usize, Error: Debug>
{
/// The zero value of the integer type.
const ZERO: Self;
/// The one value of the integer type.
const ONE: Self;
/// The maximum value of this type, as a `usize`.
const MAX: usize;
/// An infallible conversion from `usize` to `LenT`.
#[inline]
fn from_usize(val: usize) -> Self {
val.try_into().unwrap()
}
/// An infallible conversion from `LenT` to `usize`.
#[inline]
fn into_usize(self) -> usize {
self.try_into().unwrap()
}
}
macro_rules! impl_lentype {
($($(#[$meta:meta])* $LenT:ty),*) => {$(
$(#[$meta])*
impl Sealed for $LenT {
const ZERO: Self = 0;
const ONE: Self = 1;
const MAX: usize = Self::MAX as _;
}
$(#[$meta])*
impl LenType for $LenT {}
)*}
}
/// A sealed trait representing a valid type to use as a length for a container.
///
/// This cannot be implemented in user code, and is restricted to `u8`, `u16`, `u32`, and `usize`.
pub trait LenType: Sealed {}
impl_lentype!(
u8,
u16,
#[cfg(any(target_pointer_width = "32", target_pointer_width = "64"))]
u32,
usize
);
macro_rules! impl_lentodefault {
($LenT:ty: $($len:literal),*) => {$(
impl SmallestLenType for Const<$len> {
type Type = $LenT;
}
)*};
}
/// A struct to create individual types for mapping with [`SmallestLenType`].
///
/// See the documentation of [`DefaultLenType`] for a detailed explanation.
pub struct Const<const N: usize>;
/// A trait to map [`Const`] to its respective [`LenType`].
///
/// See the documentation of [`DefaultLenType`] for a detailed explanation.
#[diagnostic::on_unimplemented(
message = "Length `N` does not have a default `LenType` mapping",
note = "Provide the `LenType` explicitly, such as `usize`"
)]
pub trait SmallestLenType {
type Type: LenType;
}
/// A type alias to perform the `const N: usize` -> `LenType` mapping.
///
/// This is impossible to perform directly, but it is possible to write a `const N: usize` -> related `Type` mapping via a const generic argument,
/// then map from that to an unrelated type via a trait with associated types.
///
/// [`Const`] is the "related type" in the above explanation, [`SmallestLenType`] is the mapping trait.
pub type DefaultLenType<const N: usize> = <Const<N> as SmallestLenType>::Type;
impl_lentodefault!(u8: 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255);
impl_lentodefault!(u16: 256, 300, 400, 500, 512, 600, 700, 800, 900, 1000, 1024, 2000, 2048, 4000, 4096, 8000, 8192, 16000, 16384, 32000, 32768, 65000, 65535);
#[cfg(any(target_pointer_width = "32", target_pointer_width = "64"))]
impl_lentodefault!(u32: 65536, 131072, 262144, 524288, 1048576, 2097152, 4194304, 8388608, 16777216, 33554432, 67108864, 134217728, 268435456, 536870912, 1073741824, 2147483648);
pub const fn check_capacity_fits<LenT: LenType, const N: usize>() {
assert!(LenT::MAX >= N, "The capacity is larger than `LenT` can hold, increase the size of `LenT` or reduce the capacity");
}
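A brief sketch of how the `Const<N>` -> `SmallestLenType` -> `DefaultLenType<N>` mapping behaves in practice. The concrete capacities below were picked for illustration; only values listed in the `impl_lentodefault!` tables get a default:

```rust
use heapless::Vec;

// 255 is in the `u8` table, so the default length type is `u8`.
let a: Vec<u32, 255> = Vec::new();

// 512 is in the `u16` table, so the default length type is `u16`.
let b: Vec<u32, 512> = Vec::new();

// 1234 is not listed in any table, so `DefaultLenType<1234>` does not resolve and the
// `#[diagnostic::on_unimplemented]` note fires; the length type must be given explicitly.
let c: Vec<u32, 1234, u16> = Vec::new();

// `check_capacity_fits` runs in a `const` block inside `Vec::new`, so something like
// `Vec::<u32, 300, u8>::new()` is rejected at compile time rather than at run time.
let _ = (a, b, c);
```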


@@ -165,6 +165,7 @@ pub use indexmap::{
     ValuesMut as IndexMapValuesMut,
 };
 pub use indexset::{FnvIndexSet, IndexSet, Iter as IndexSetIter};
+pub use len_type::LenType;
 pub use linear_map::LinearMap;
 pub use string::String;
@@ -178,6 +179,7 @@ pub mod deque;
 pub mod histbuf;
 mod indexmap;
 mod indexset;
+mod len_type;
 pub mod linear_map;
 mod slice;
 pub mod storage;
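Because `LenType` is now re-exported from the crate root, downstream code can be written generically over the length type. A minimal sketch (the helper function is hypothetical, not part of the PR):

```rust
use heapless::{LenType, Vec};

// Accepts a `Vec` with any capacity and any length type.
fn sum<LenT: LenType, const N: usize>(v: &Vec<u32, N, LenT>) -> u32 {
    v.iter().copied().sum()
}

let v: Vec<u32, 8> = Vec::from_slice(&[1, 2, 3]).unwrap();
assert_eq!(sum(&v), 6);
```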


@@ -89,7 +89,7 @@ pub type ViewStorage<K, V> = ViewVecStorage<(K, V)>;
 /// Base struct for [`LinearMap`] and [`LinearMapView`]
 pub struct LinearMapInner<K, V, S: LinearMapStorage<K, V> + ?Sized> {
-    pub(crate) buffer: VecInner<(K, V), S>,
+    pub(crate) buffer: VecInner<(K, V), usize, S>,
 }
 /// A fixed capacity map/dictionary that performs lookups via linear search.
@@ -543,7 +543,7 @@ pub struct IntoIter<K, V, const N: usize>
 where
     K: Eq,
 {
-    inner: <Vec<(K, V), N> as IntoIterator>::IntoIter,
+    inner: <Vec<(K, V), N, usize> as IntoIterator>::IntoIter,
 }
 impl<K, V, const N: usize> Iterator for IntoIter<K, V, N>


@@ -4,6 +4,7 @@ use crate::{
     binary_heap::{BinaryHeapInner, Kind as BinaryHeapKind},
     deque::DequeInner,
     histbuf::{HistBufStorage, HistoryBufferInner},
+    len_type::LenType,
     linear_map::{LinearMapInner, LinearMapStorage},
     string::{StringInner, StringStorage},
     vec::{VecInner, VecStorage},
@@ -48,7 +49,7 @@ where
     }
 }
-impl<T, St: VecStorage<T> + ?Sized> Serialize for VecInner<T, St>
+impl<T, LenT: LenType, St: VecStorage<T>> Serialize for VecInner<T, LenT, St>
 where
     T: Serialize,
 {


@@ -11,8 +11,11 @@ use core::{
     str::{self, Utf8Error},
 };
-use crate::vec::{OwnedVecStorage, Vec, VecInner, ViewVecStorage};
 use crate::CapacityError;
+use crate::{
+    len_type::LenType,
+    vec::{OwnedVecStorage, Vec, VecInner, ViewVecStorage},
+};
 mod drain;
 pub use drain::Drain;
@@ -131,7 +134,7 @@ pub type ViewStorage = ViewVecStorage<u8>;
 /// In most cases you should use [`String`] or [`StringView`] directly. Only use this
 /// struct if you want to write code that's generic over both.
 pub struct StringInner<S: StringStorage + ?Sized> {
-    vec: VecInner<u8, S>,
+    vec: VecInner<u8, usize, S>,
 }
 /// A fixed capacity [`String`](https://doc.rust-lang.org/std/string/struct.String.html).
@@ -229,9 +232,11 @@ impl<const N: usize> String<N> {
     /// # Ok::<(), core::str::Utf8Error>(())
     /// ```
     #[inline]
-    pub fn from_utf8(vec: Vec<u8, N>) -> Result<Self, Utf8Error> {
+    pub fn from_utf8<LenT: LenType>(vec: Vec<u8, N, LenT>) -> Result<Self, Utf8Error> {
         core::str::from_utf8(&vec)?;
-        Ok(Self { vec })
+
+        // SAFETY: UTF-8 invariant has just been checked by `str::from_utf8`.
+        Ok(unsafe { Self::from_utf8_unchecked(vec) })
     }
     /// Convert UTF-8 bytes into a `String`, without checking that the string
@@ -256,8 +261,10 @@ impl<const N: usize> String<N> {
     /// assert_eq!("💖", sparkle_heart);
     /// ```
     #[inline]
-    pub const unsafe fn from_utf8_unchecked(vec: Vec<u8, N>) -> Self {
-        Self { vec }
+    pub unsafe fn from_utf8_unchecked<LenT: LenType>(vec: Vec<u8, N, LenT>) -> Self {
+        Self {
+            vec: vec.cast_len_type(),
+        }
     }
     /// Converts a `String` into a byte vector.
@@ -279,7 +286,7 @@ impl<const N: usize> String<N> {
     /// # Ok::<(), heapless::CapacityError>(())
     /// ```
     #[inline]
-    pub fn into_bytes(self) -> Vec<u8, N> {
+    pub fn into_bytes(self) -> Vec<u8, N, usize> {
         self.vec
     }
 }
@@ -457,7 +464,7 @@ impl<S: StringStorage + ?Sized> StringInner<S> {
     /// assert_eq!(s, "olleh");
     /// # Ok::<(), heapless::CapacityError>(())
     /// ```
-    pub unsafe fn as_mut_vec(&mut self) -> &mut VecInner<u8, S> {
+    pub unsafe fn as_mut_vec(&mut self) -> &mut VecInner<u8, usize, S> {
         &mut self.vec
     }
@@ -1068,7 +1075,7 @@ mod tests {
     #[test]
     fn into_bytes() {
         let s: String<4> = String::try_from("ab").unwrap();
-        let b: Vec<u8, 4> = s.into_bytes();
+        let b: Vec<u8, 4, usize> = s.into_bytes();
         assert_eq!(b.len(), 2);
         assert_eq!(b"ab", &b[..]);
     }
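A hedged sketch of what the relaxed `from_utf8` signature permits after this change; the `u8` length type below is just an illustrative choice:

```rust
use heapless::{String, Vec};

// The byte vector may use any length type...
let bytes: Vec<u8, 4, u8> = Vec::from_slice(b"ab").unwrap();

// ...and `from_utf8` accepts it, casting the length type internally via
// `cast_len_type`, while `String` itself keeps a `usize`-backed vector.
let s: String<4> = String::from_utf8(bytes).unwrap();
assert_eq!(s.as_str(), "ab");

// `into_bytes` accordingly hands back a `Vec<u8, N, usize>`.
let bytes_again: Vec<u8, 4, usize> = s.into_bytes();
assert_eq!(b"ab", &bytes_again[..]);
```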


@@ -1,4 +1,5 @@
 use crate::{
+    len_type::LenType,
     string::{StringInner, StringStorage},
     vec::{VecInner, VecStorage},
     CapacityError,
@@ -24,7 +25,7 @@ impl<S: StringStorage + ?Sized> uWrite for StringInner<S> {
     }
 }
-impl<S: VecStorage<u8> + ?Sized> uWrite for VecInner<u8, S> {
+impl<LenT: LenType, S: VecStorage<u8> + ?Sized> uWrite for VecInner<u8, LenT, S> {
     type Error = CapacityError;
     #[inline]
     fn write_str(&mut self, s: &str) -> Result<(), Self::Error> {


@@ -6,38 +6,31 @@ use core::{
     slice,
 };
+use crate::len_type::LenType;
 use super::VecView;
 /// A draining iterator for [`Vec`](super::Vec).
 ///
 /// This `struct` is created by [`Vec::drain`](super::Vec::drain).
 /// See its documentation for more.
-///
-/// # Example
-///
-/// ```
-/// use heapless::{vec, Vec};
-///
-/// let mut v = Vec::<_, 4>::from_array([0, 1, 2]);
-/// let iter: vec::Drain<'_, _> = v.drain(..);
-/// ```
-pub struct Drain<'a, T: 'a> {
+pub struct Drain<'a, T: 'a, LenT: LenType> {
     /// Index of tail to preserve
-    pub(super) tail_start: usize,
+    pub(super) tail_start: LenT,
     /// Length of tail
-    pub(super) tail_len: usize,
+    pub(super) tail_len: LenT,
     /// Current remaining range to remove
     pub(super) iter: slice::Iter<'a, T>,
-    pub(super) vec: NonNull<VecView<T>>,
+    pub(super) vec: NonNull<VecView<T, LenT>>,
 }
-impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
+impl<T: fmt::Debug, LenT: LenType> fmt::Debug for Drain<'_, T, LenT> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
     }
 }
-impl<T> Drain<'_, T> {
+impl<T, LenT: LenType> Drain<'_, T, LenT> {
     /// Returns the remaining items of this iterator as a slice.
     ///
     /// # Examples
@@ -57,16 +50,16 @@ impl<T> Drain<'_, T> {
     }
 }
-impl<T> AsRef<[T]> for Drain<'_, T> {
+impl<T, LenT: LenType> AsRef<[T]> for Drain<'_, T, LenT> {
     fn as_ref(&self) -> &[T] {
         self.as_slice()
     }
 }
-unsafe impl<T: Sync> Sync for Drain<'_, T> {}
-unsafe impl<T: Send> Send for Drain<'_, T> {}
+unsafe impl<T: Sync, LenT: LenType> Sync for Drain<'_, T, LenT> {}
+unsafe impl<T: Send, LenT: LenType> Send for Drain<'_, T, LenT> {}
-impl<T> Iterator for Drain<'_, T> {
+impl<T, LenT: LenType> Iterator for Drain<'_, T, LenT> {
     type Item = T;
     #[inline]
@@ -81,7 +74,7 @@ impl<T> Iterator for Drain<'_, T> {
     }
 }
-impl<T> DoubleEndedIterator for Drain<'_, T> {
+impl<T, LenT: LenType> DoubleEndedIterator for Drain<'_, T, LenT> {
     #[inline]
     fn next_back(&mut self) -> Option<T> {
         self.iter
@@ -90,25 +83,26 @@ impl<T> DoubleEndedIterator for Drain<'_, T> {
     }
 }
-impl<T> Drop for Drain<'_, T> {
+impl<T, LenT: LenType> Drop for Drain<'_, T, LenT> {
     fn drop(&mut self) {
         /// Moves back the un-`Drain`ed elements to restore the original `Vec`.
-        struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);
+        struct DropGuard<'r, 'a, T, LenT: LenType>(&'r mut Drain<'a, T, LenT>);
-        impl<T> Drop for DropGuard<'_, '_, T> {
+        impl<T, LenT: LenType> Drop for DropGuard<'_, '_, T, LenT> {
             fn drop(&mut self) {
-                if self.0.tail_len > 0 {
+                if self.0.tail_len > LenT::ZERO {
                     unsafe {
                         let source_vec = self.0.vec.as_mut();
                         // memmove back untouched tail, update to new length
                         let start = source_vec.len();
-                        let tail = self.0.tail_start;
+                        let tail = self.0.tail_start.into_usize();
+                        let tail_len = self.0.tail_len.into_usize();
                         if tail != start {
                             let dst = source_vec.as_mut_ptr().add(start);
                             let src = source_vec.as_ptr().add(tail);
-                            ptr::copy(src, dst, self.0.tail_len);
+                            ptr::copy(src, dst, tail_len);
                         }
-                        source_vec.set_len(start + self.0.tail_len);
+                        source_vec.set_len(start + tail_len);
                     }
                 }
             }
         }
@@ -125,8 +119,9 @@ impl<T> Drop for Drain<'_, T> {
             unsafe {
                 let vec = vec.as_mut();
                 let old_len = vec.len();
-                vec.set_len(old_len + drop_len + self.tail_len);
-                vec.truncate(old_len + self.tail_len);
+                let tail_len = self.tail_len.into_usize();
+                vec.set_len(old_len + drop_len + tail_len);
+                vec.truncate(old_len + tail_len);
             }
             return;
@@ -159,9 +154,9 @@ impl<T> Drop for Drain<'_, T> {
     }
 }
-impl<T> ExactSizeIterator for Drain<'_, T> {}
+impl<T, LenT: LenType> ExactSizeIterator for Drain<'_, T, LenT> {}
-impl<T> FusedIterator for Drain<'_, T> {}
+impl<T, LenT: LenType> FusedIterator for Drain<'_, T, LenT> {}
 #[cfg(test)]
 mod tests {
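The doc example that the diff removes above relied on the old two-parameter `Drain`; a hedged sketch of the equivalent usage with the new signature (the explicit annotation is only there to show the extra generic, it can normally be inferred):

```rust
use heapless::{vec, Vec};

let mut v = Vec::<_, 4>::from_array([0, 1, 2]);

// `Drain` now carries the vector's length type as a third parameter;
// for a capacity of 4 the default `LenT` is `u8`.
let iter: vec::Drain<'_, _, u8> = v.drain(..);
drop(iter);
assert!(v.is_empty());
```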


@ -11,6 +11,7 @@ use core::{
slice, slice,
}; };
use crate::len_type::{check_capacity_fits, DefaultLenType, LenType};
use crate::CapacityError; use crate::CapacityError;
mod drain; mod drain;
@ -21,6 +22,7 @@ mod storage {
use crate::{ use crate::{
binary_heap::{BinaryHeapInner, BinaryHeapView}, binary_heap::{BinaryHeapInner, BinaryHeapView},
deque::{DequeInner, DequeView}, deque::{DequeInner, DequeView},
len_type::LenType,
}; };
use super::{VecInner, VecView}; use super::{VecInner, VecView};
@ -56,10 +58,12 @@ mod storage {
fn borrow(&self) -> &[MaybeUninit<T>]; fn borrow(&self) -> &[MaybeUninit<T>];
fn borrow_mut(&mut self) -> &mut [MaybeUninit<T>]; fn borrow_mut(&mut self) -> &mut [MaybeUninit<T>];
fn as_vec_view(this: &VecInner<T, Self>) -> &VecView<T> fn as_vec_view<LenT: LenType>(this: &VecInner<T, LenT, Self>) -> &VecView<T, LenT>
where where
Self: VecStorage<T>; Self: VecStorage<T>;
fn as_vec_mut_view(this: &mut VecInner<T, Self>) -> &mut VecView<T> fn as_vec_mut_view<LenT: LenType>(
this: &mut VecInner<T, LenT, Self>,
) -> &mut VecView<T, LenT>
where where
Self: VecStorage<T>; Self: VecStorage<T>;
@ -98,13 +102,15 @@ mod storage {
&mut self.buffer &mut self.buffer
} }
fn as_vec_view(this: &VecInner<T, Self>) -> &VecView<T> fn as_vec_view<LenT: LenType>(this: &VecInner<T, LenT, Self>) -> &VecView<T, LenT>
where where
Self: VecStorage<T>, Self: VecStorage<T>,
{ {
this this
} }
fn as_vec_mut_view(this: &mut VecInner<T, Self>) -> &mut VecView<T> fn as_vec_mut_view<LenT: LenType>(
this: &mut VecInner<T, LenT, Self>,
) -> &mut VecView<T, LenT>
where where
Self: VecStorage<T>, Self: VecStorage<T>,
{ {
@ -148,13 +154,15 @@ mod storage {
&mut self.buffer &mut self.buffer
} }
fn as_vec_view(this: &VecInner<T, Self>) -> &VecView<T> fn as_vec_view<LenT: LenType>(this: &VecInner<T, LenT, Self>) -> &VecView<T, LenT>
where where
Self: VecStorage<T>, Self: VecStorage<T>,
{ {
this this
} }
fn as_vec_mut_view(this: &mut VecInner<T, Self>) -> &mut VecView<T> fn as_vec_mut_view<LenT: LenType>(
this: &mut VecInner<T, LenT, Self>,
) -> &mut VecView<T, LenT>
where where
Self: VecStorage<T>, Self: VecStorage<T>,
{ {
@ -200,9 +208,9 @@ pub use drain::Drain;
/// ///
/// In most cases you should use [`Vec`] or [`VecView`] directly. Only use this /// In most cases you should use [`Vec`] or [`VecView`] directly. Only use this
/// struct if you want to write code that's generic over both. /// struct if you want to write code that's generic over both.
pub struct VecInner<T, S: VecStorage<T> + ?Sized> { pub struct VecInner<T, LenT: LenType, S: VecStorage<T> + ?Sized> {
phantom: PhantomData<T>, phantom: PhantomData<T>,
len: usize, len: LenT,
buffer: S, buffer: S,
} }
@ -241,9 +249,14 @@ pub struct VecInner<T, S: VecStorage<T> + ?Sized> {
/// use heapless::{Vec, VecView}; /// use heapless::{Vec, VecView};
/// ///
/// let vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap(); /// let vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
/// let view: &VecView<_> = &vec; /// let view: &VecView<_, _> = &vec;
/// ``` /// ```
pub type Vec<T, const N: usize> = VecInner<T, OwnedVecStorage<T, N>>; ///
/// For uncommon capacity values, or in generic scenarios, you may have to provide the `LenT` generic yourself.
///
/// This should be the smallest unsigned integer type that your capacity fits in, or `usize` if you don't want to consider this.
pub type Vec<T, const N: usize, LenT = DefaultLenType<N>> =
VecInner<T, LenT, OwnedVecStorage<T, N>>;
/// A [`Vec`] with dynamic capacity /// A [`Vec`] with dynamic capacity
/// ///
@ -259,16 +272,16 @@ pub type Vec<T, const N: usize> = VecInner<T, OwnedVecStorage<T, N>>;
/// use heapless::{Vec, VecView}; /// use heapless::{Vec, VecView};
/// ///
/// let mut vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap(); /// let mut vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
/// let view: &VecView<_> = &vec; /// let view: &VecView<_, _> = &vec;
/// assert_eq!(view, &[1, 2, 3, 4]); /// assert_eq!(view, &[1, 2, 3, 4]);
/// ///
/// let mut_view: &mut VecView<_> = &mut vec; /// let mut_view: &mut VecView<_, _> = &mut vec;
/// mut_view.push(5); /// mut_view.push(5);
/// assert_eq!(vec, [1, 2, 3, 4, 5]); /// assert_eq!(vec, [1, 2, 3, 4, 5]);
/// ``` /// ```
pub type VecView<T> = VecInner<T, ViewVecStorage<T>>; pub type VecView<T, LenT = usize> = VecInner<T, LenT, ViewVecStorage<T>>;
impl<T, const N: usize> Vec<T, N> { impl<T, LenT: LenType, const N: usize> Vec<T, N, LenT> {
const ELEM: MaybeUninit<T> = MaybeUninit::uninit(); const ELEM: MaybeUninit<T> = MaybeUninit::uninit();
const INIT: [MaybeUninit<T>; N] = [Self::ELEM; N]; // important for optimization of `new` const INIT: [MaybeUninit<T>; N] = [Self::ELEM; N]; // important for optimization of `new`
@ -286,9 +299,11 @@ impl<T, const N: usize> Vec<T, N> {
/// static mut X: Vec<u8, 16> = Vec::new(); /// static mut X: Vec<u8, 16> = Vec::new();
/// ``` /// ```
pub const fn new() -> Self { pub const fn new() -> Self {
const { check_capacity_fits::<LenT, N>() }
Self { Self {
phantom: PhantomData, phantom: PhantomData,
len: 0, len: LenT::ZERO,
buffer: VecStorageInner { buffer: Self::INIT }, buffer: VecStorageInner { buffer: Self::INIT },
} }
} }
@ -333,7 +348,7 @@ impl<T, const N: usize> Vec<T, N> {
if N == M { if N == M {
Self { Self {
phantom: PhantomData, phantom: PhantomData,
len: N, len: LenT::from_usize(N),
// NOTE(unsafe) ManuallyDrop<[T; M]> and [MaybeUninit<T>; N] // NOTE(unsafe) ManuallyDrop<[T; M]> and [MaybeUninit<T>; N]
// have the same layout when N == M. // have the same layout when N == M.
buffer: unsafe { mem::transmute_copy(&src) }, buffer: unsafe { mem::transmute_copy(&src) },
@ -347,7 +362,7 @@ impl<T, const N: usize> Vec<T, N> {
dst_elem.write(unsafe { ptr::read(src_elem) }); dst_elem.write(unsafe { ptr::read(src_elem) });
} }
v.len = M; unsafe { v.set_len(M) };
v v
} }
} }
@ -392,9 +407,27 @@ impl<T, const N: usize> Vec<T, N> {
} }
new new
} }
/// Casts the `LenT` type to a new type, preserving everything else about the vector.
///
/// This can be useful if you need to pass a `Vec<T, N, u8>` into a `Vec<T, N, usize>` for example.
///
/// This will check at compile time if the `N` value will fit into `NewLenT`, and error if not.
pub fn cast_len_type<NewLenT: LenType>(self) -> Vec<T, N, NewLenT> {
const { check_capacity_fits::<NewLenT, N>() }
let this = ManuallyDrop::new(self);
// SAFETY: Pointer argument is derived from a reference, meeting the safety documented invariants.
// This also prevents double drops by wrapping `self` in `ManuallyDrop`.
Vec {
len: NewLenT::from_usize(this.len()),
buffer: unsafe { ptr::read(&this.buffer) },
phantom: PhantomData,
}
}
} }
impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> VecInner<T, LenT, S> {
/// Removes the specified range from the vector in bulk, returning all /// Removes the specified range from the vector in bulk, returning all
/// removed elements as an iterator. If the iterator is dropped before /// removed elements as an iterator. If the iterator is dropped before
/// being fully consumed, it drops the remaining removed elements. /// being fully consumed, it drops the remaining removed elements.
@ -427,7 +460,7 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
/// v.drain(..); /// v.drain(..);
/// assert_eq!(v, &[]); /// assert_eq!(v, &[]);
/// ``` /// ```
pub fn drain<R>(&mut self, range: R) -> Drain<'_, T> pub fn drain<R>(&mut self, range: R) -> Drain<'_, T, LenT>
where where
R: RangeBounds<usize>, R: RangeBounds<usize>,
{ {
@ -450,8 +483,8 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
let vec = NonNull::from(self.as_mut_view()); let vec = NonNull::from(self.as_mut_view());
let range_slice = slice::from_raw_parts(vec.as_ref().as_ptr().add(start), end - start); let range_slice = slice::from_raw_parts(vec.as_ref().as_ptr().add(start), end - start);
Drain { Drain {
tail_start: end, tail_start: LenT::from_usize(end),
tail_len: len - end, tail_len: LenT::from_usize(len - end),
iter: range_slice.iter(), iter: range_slice.iter(),
vec, vec,
} }
@ -464,7 +497,7 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
/// ```rust /// ```rust
/// # use heapless::{Vec, VecView}; /// # use heapless::{Vec, VecView};
/// let vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap(); /// let vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
/// let view: &VecView<u8> = vec.as_view(); /// let view: &VecView<u8, _> = vec.as_view();
/// ``` /// ```
/// ///
/// It is often preferable to do the same through type coercion, since `Vec<T, N>` implements `Unsize<VecView<T>>`:
@ -472,10 +505,10 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
/// ```rust /// ```rust
/// # use heapless::{Vec, VecView}; /// # use heapless::{Vec, VecView};
/// let vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap(); /// let vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
/// let view: &VecView<u8> = &vec; /// let view: &VecView<u8, _> = &vec;
/// ``` /// ```
#[inline] #[inline]
pub fn as_view(&self) -> &VecView<T> { pub fn as_view(&self) -> &VecView<T, LenT> {
S::as_vec_view(self) S::as_vec_view(self)
} }
@ -483,19 +516,19 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
/// ///
/// ```rust /// ```rust
/// # use heapless::{Vec, VecView}; /// # use heapless::{Vec, VecView};
/// let mut vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap(); /// let mut vec: Vec<u8, 10, u8> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
/// let view: &mut VecView<u8> = vec.as_mut_view(); /// let view: &mut VecView<u8, _> = vec.as_mut_view();
/// ``` /// ```
/// ///
/// It is often preferable to do the same through type coercion, since `Vec<T, N>` implements `Unsize<VecView<T>>`:
/// ///
/// ```rust /// ```rust
/// # use heapless::{Vec, VecView}; /// # use heapless::{Vec, VecView};
/// let mut vec: Vec<u8, 10> = Vec::from_slice(&[1, 2, 3, 4]).unwrap(); /// let mut vec: Vec<u8, 10, u8> = Vec::from_slice(&[1, 2, 3, 4]).unwrap();
/// let view: &mut VecView<u8> = &mut vec; /// let view: &mut VecView<u8, _> = &mut vec;
/// ``` /// ```
#[inline] #[inline]
pub fn as_mut_view(&mut self) -> &mut VecView<T> { pub fn as_mut_view(&mut self) -> &mut VecView<T, LenT> {
S::as_vec_mut_view(self) S::as_vec_mut_view(self)
} }
@ -523,7 +556,12 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
pub fn as_slice(&self) -> &[T] { pub fn as_slice(&self) -> &[T] {
// NOTE(unsafe) avoid bound checks in the slicing operation // NOTE(unsafe) avoid bound checks in the slicing operation
// &buffer[..self.len] // &buffer[..self.len]
unsafe { slice::from_raw_parts(self.buffer.borrow().as_ptr().cast::<T>(), self.len) } unsafe {
slice::from_raw_parts(
self.buffer.borrow().as_ptr().cast::<T>(),
self.len.into_usize(),
)
}
} }
/// Extracts a mutable slice containing the entire vector. /// Extracts a mutable slice containing the entire vector.
@ -543,7 +581,10 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
// NOTE(unsafe) avoid bound checks in the slicing operation // NOTE(unsafe) avoid bound checks in the slicing operation
// &mut buffer[..self.len] // &mut buffer[..self.len]
unsafe { unsafe {
slice::from_raw_parts_mut(self.buffer.borrow_mut().as_mut_ptr().cast::<T>(), self.len) slice::from_raw_parts_mut(
self.buffer.borrow_mut().as_mut_ptr().cast::<T>(),
self.len.into_usize(),
)
} }
} }
@ -590,21 +631,23 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
where where
T: Clone, T: Clone,
{ {
pub fn extend_from_slice_inner<T>( pub fn extend_from_slice_inner<T, LenT: LenType>(
len: &mut usize, len: &mut LenT,
buf: &mut [MaybeUninit<T>], buf: &mut [MaybeUninit<T>],
other: &[T], other: &[T],
) -> Result<(), CapacityError> ) -> Result<(), CapacityError>
where where
T: Clone, T: Clone,
{ {
if *len + other.len() > buf.len() { if len.into_usize() + other.len() > buf.len() {
// won't fit in the `Vec`; don't modify anything and return an error // won't fit in the `Vec`; don't modify anything and return an error
Err(CapacityError) Err(CapacityError)
} else { } else {
for elem in other { for elem in other {
unsafe { *buf.get_unchecked_mut(*len) = MaybeUninit::new(elem.clone()) } unsafe {
*len += 1; *buf.get_unchecked_mut(len.into_usize()) = MaybeUninit::new(elem.clone());
}
*len += LenT::ONE;
} }
Ok(()) Ok(())
} }
@ -615,10 +658,10 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
/// Removes the last element from a vector and returns it, or `None` if it's empty /// Removes the last element from a vector and returns it, or `None` if it's empty
pub fn pop(&mut self) -> Option<T> { pub fn pop(&mut self) -> Option<T> {
if self.len != 0 { if self.len == LenT::ZERO {
Some(unsafe { self.pop_unchecked() })
} else {
None None
} else {
Some(unsafe { self.pop_unchecked() })
} }
} }
@ -626,7 +669,7 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
/// ///
/// Returns back the `item` if the vector is full. /// Returns back the `item` if the vector is full.
pub fn push(&mut self, item: T) -> Result<(), T> { pub fn push(&mut self, item: T) -> Result<(), T> {
if self.len < self.capacity() { if self.len() < self.capacity() {
unsafe { self.push_unchecked(item) } unsafe { self.push_unchecked(item) }
Ok(()) Ok(())
} else { } else {
@ -642,10 +685,10 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
pub unsafe fn pop_unchecked(&mut self) -> T { pub unsafe fn pop_unchecked(&mut self) -> T {
debug_assert!(!self.is_empty()); debug_assert!(!self.is_empty());
self.len -= 1; self.len -= LenT::ONE;
self.buffer self.buffer
.borrow_mut() .borrow_mut()
.get_unchecked_mut(self.len) .get_unchecked_mut(self.len.into_usize())
.as_ptr() .as_ptr()
.read() .read()
} }
@ -660,9 +703,12 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
// use `ptr::write` to avoid running `T`'s destructor on the uninitialized memory // use `ptr::write` to avoid running `T`'s destructor on the uninitialized memory
debug_assert!(!self.is_full()); debug_assert!(!self.is_full());
*self.buffer.borrow_mut().get_unchecked_mut(self.len) = MaybeUninit::new(item); *self
.buffer
.borrow_mut()
.get_unchecked_mut(self.len.into_usize()) = MaybeUninit::new(item);
self.len += 1; self.len += LenT::ONE;
} }
/// Shortens the vector, keeping the first `len` elements and dropping the rest. /// Shortens the vector, keeping the first `len` elements and dropping the rest.
@ -678,12 +724,12 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
// Note: It's intentional that this is `>` and not `>=`. // Note: It's intentional that this is `>` and not `>=`.
// Changing it to `>=` has negative performance // Changing it to `>=` has negative performance
// implications in some cases. See rust-lang/rust#78884 for more. // implications in some cases. See rust-lang/rust#78884 for more.
if len > self.len { if len > self.len() {
return; return;
} }
let remaining_len = self.len - len; let remaining_len = self.len() - len;
let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len); let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
self.len = len; self.len = LenT::from_usize(len);
ptr::drop_in_place(s); ptr::drop_in_place(s);
} }
} }
@ -703,8 +749,8 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
return Err(CapacityError); return Err(CapacityError);
} }
if new_len > self.len { if new_len > self.len() {
while self.len < new_len { while self.len() < new_len {
self.push(value.clone()).ok(); self.push(value.clone()).ok();
} }
} else { } else {
@ -820,7 +866,7 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
pub unsafe fn set_len(&mut self, new_len: usize) { pub unsafe fn set_len(&mut self, new_len: usize) {
debug_assert!(new_len <= self.capacity()); debug_assert!(new_len <= self.capacity());
self.len = new_len; self.len = LenT::from_usize(new_len);
} }
/// Removes an element from the vector and returns it. /// Removes an element from the vector and returns it.
@ -851,7 +897,7 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
/// assert_eq!(&*v, ["baz", "qux"]); /// assert_eq!(&*v, ["baz", "qux"]);
/// ``` /// ```
pub fn swap_remove(&mut self, index: usize) -> T { pub fn swap_remove(&mut self, index: usize) -> T {
assert!(index < self.len); assert!(index < self.len());
unsafe { self.swap_remove_unchecked(index) } unsafe { self.swap_remove_unchecked(index) }
} }
@ -888,18 +934,18 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
let value = ptr::read(self.as_ptr().add(index)); let value = ptr::read(self.as_ptr().add(index));
let base_ptr = self.as_mut_ptr(); let base_ptr = self.as_mut_ptr();
ptr::copy(base_ptr.add(length - 1), base_ptr.add(index), 1); ptr::copy(base_ptr.add(length - 1), base_ptr.add(index), 1);
self.len -= 1; self.len -= LenT::ONE;
value value
} }
/// Returns true if the vec is full /// Returns true if the vec is full
pub fn is_full(&self) -> bool { pub fn is_full(&self) -> bool {
self.len == self.capacity() self.len() == self.capacity()
} }
/// Returns true if the vec is empty /// Returns true if the vec is empty
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
self.len == 0 self.len == LenT::ZERO
} }
/// Returns `true` if `needle` is a prefix of the Vec. /// Returns `true` if `needle` is a prefix of the Vec.
@ -921,7 +967,7 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
T: PartialEq, T: PartialEq,
{ {
let n = needle.len(); let n = needle.len();
self.len >= n && needle == &self[..n] self.len() >= n && needle == &self[..n]
} }
/// Returns `true` if `needle` is a suffix of the Vec. /// Returns `true` if `needle` is a suffix of the Vec.
@ -1107,7 +1153,7 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
where where
F: FnMut(&mut T) -> bool, F: FnMut(&mut T) -> bool,
{ {
let original_len = self.len(); let original_len = self.len;
// Avoid double drop if the drop guard is not executed, // Avoid double drop if the drop guard is not executed,
// since we may make some holes during the process. // since we may make some holes during the process.
unsafe { self.set_len(0) }; unsafe { self.set_len(0) };
@ -1123,56 +1169,57 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
// This drop guard will be invoked when predicate or `drop` of element panicked. // This drop guard will be invoked when predicate or `drop` of element panicked.
// It shifts unchecked elements to cover holes and `set_len` to the correct length. // It shifts unchecked elements to cover holes and `set_len` to the correct length.
// In cases when predicate and `drop` never panic, it will be optimized out.
struct BackshiftOnDrop<'a, T, S: VecStorage<T> + ?Sized> { struct BackshiftOnDrop<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> {
v: &'a mut VecInner<T, S>, v: &'a mut VecInner<T, LenT, S>,
processed_len: usize, processed_len: LenT,
deleted_cnt: usize, deleted_cnt: LenT,
original_len: usize, original_len: LenT,
} }
impl<T, S: VecStorage<T> + ?Sized> Drop for BackshiftOnDrop<'_, T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Drop for BackshiftOnDrop<'_, T, LenT, S> {
fn drop(&mut self) { fn drop(&mut self) {
if self.deleted_cnt > 0 { if self.deleted_cnt > LenT::ZERO {
// SAFETY: Trailing unchecked items must be valid since we never touch them. // SAFETY: Trailing unchecked items must be valid since we never touch them.
unsafe { unsafe {
ptr::copy( ptr::copy(
self.v.as_ptr().add(self.processed_len), self.v.as_ptr().add(self.processed_len.into_usize()),
self.v self.v
.as_mut_ptr() .as_mut_ptr()
.add(self.processed_len - self.deleted_cnt), .add((self.processed_len - self.deleted_cnt).into_usize()),
self.original_len - self.processed_len, (self.original_len - self.processed_len).into_usize(),
); );
} }
} }
// SAFETY: After filling holes, all items are in contiguous memory. // SAFETY: After filling holes, all items are in contiguous memory.
unsafe { unsafe {
self.v.set_len(self.original_len - self.deleted_cnt); self.v
.set_len((self.original_len - self.deleted_cnt).into_usize());
} }
} }
} }
let mut g = BackshiftOnDrop { let mut g = BackshiftOnDrop {
v: self, v: self,
processed_len: 0, processed_len: LenT::ZERO,
deleted_cnt: 0, deleted_cnt: LenT::ZERO,
original_len, original_len,
}; };
fn process_loop<F, T, S: VecStorage<T> + ?Sized, const DELETED: bool>( fn process_loop<F, T, LenT: LenType, S: VecStorage<T> + ?Sized, const DELETED: bool>(
original_len: usize, original_len: LenT,
f: &mut F, f: &mut F,
g: &mut BackshiftOnDrop<'_, T, S>, g: &mut BackshiftOnDrop<'_, T, LenT, S>,
) where ) where
F: FnMut(&mut T) -> bool, F: FnMut(&mut T) -> bool,
{ {
while g.processed_len != original_len { while g.processed_len != original_len {
let p = g.v.as_mut_ptr(); let p = g.v.as_mut_ptr();
// SAFETY: Unchecked element must be valid. // SAFETY: Unchecked element must be valid.
let cur = unsafe { &mut *p.add(g.processed_len) }; let cur = unsafe { &mut *p.add(g.processed_len.into_usize()) };
if !f(cur) { if !f(cur) {
// Advance early to avoid double drop if `drop_in_place` panicked. // Advance early to avoid double drop if `drop_in_place` panicked.
g.processed_len += 1; g.processed_len += LenT::ONE;
g.deleted_cnt += 1; g.deleted_cnt += LenT::ONE;
// SAFETY: We never touch this element again after dropped. // SAFETY: We never touch this element again after dropped.
unsafe { ptr::drop_in_place(cur) }; unsafe { ptr::drop_in_place(cur) };
// We already advanced the counter. // We already advanced the counter.
@ -1186,19 +1233,19 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
// SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element. // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element.
// We use copy for move, and never touch this element again. // We use copy for move, and never touch this element again.
unsafe { unsafe {
let hole_slot = p.add(g.processed_len - g.deleted_cnt); let hole_slot = p.add((g.processed_len - g.deleted_cnt).into_usize());
ptr::copy_nonoverlapping(cur, hole_slot, 1); ptr::copy_nonoverlapping(cur, hole_slot, 1);
} }
} }
g.processed_len += 1; g.processed_len += LenT::ONE;
} }
} }
// Stage 1: Nothing was deleted. // Stage 1: Nothing was deleted.
process_loop::<F, T, S, false>(original_len, &mut f, &mut g); process_loop::<F, T, LenT, S, false>(original_len, &mut f, &mut g);
// Stage 2: Some elements were deleted. // Stage 2: Some elements were deleted.
process_loop::<F, T, S, true>(original_len, &mut f, &mut g); process_loop::<F, T, LenT, S, true>(original_len, &mut f, &mut g);
// All items are processed. This can be optimized to `set_len` by LLVM.
drop(g); drop(g);
@ -1232,19 +1279,19 @@ impl<T, S: VecStorage<T> + ?Sized> VecInner<T, S> {
/// ``` /// ```
#[inline] #[inline]
pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] { pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit<T>] {
&mut self.buffer.borrow_mut()[self.len..] &mut self.buffer.borrow_mut()[self.len.into_usize()..]
} }
} }
// Trait implementations // Trait implementations
impl<T, const N: usize> Default for Vec<T, N> { impl<T, LenT: LenType, const N: usize> Default for Vec<T, N, LenT> {
fn default() -> Self { fn default() -> Self {
Self::new() Self::new()
} }
} }
impl<T, S: VecStorage<T> + ?Sized> fmt::Debug for VecInner<T, S> impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> fmt::Debug for VecInner<T, LenT, S>
where where
T: fmt::Debug, T: fmt::Debug,
{ {
@ -1253,7 +1300,7 @@ where
} }
} }
impl<S: VecStorage<u8> + ?Sized> fmt::Write for VecInner<u8, S> { impl<LenT: LenType, S: VecStorage<u8> + ?Sized> fmt::Write for VecInner<u8, LenT, S> {
fn write_str(&mut self, s: &str) -> fmt::Result { fn write_str(&mut self, s: &str) -> fmt::Result {
match self.extend_from_slice(s.as_bytes()) { match self.extend_from_slice(s.as_bytes()) {
Ok(()) => Ok(()), Ok(()) => Ok(()),
@ -1262,13 +1309,13 @@ impl<S: VecStorage<u8> + ?Sized> fmt::Write for VecInner<u8, S> {
} }
} }
impl<T, const N: usize, const M: usize> From<[T; M]> for Vec<T, N> { impl<T, LenT: LenType, const N: usize, const M: usize> From<[T; M]> for Vec<T, N, LenT> {
fn from(array: [T; M]) -> Self { fn from(array: [T; M]) -> Self {
Self::from_array(array) Self::from_array(array)
} }
} }
impl<T, S: VecStorage<T> + ?Sized> Drop for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Drop for VecInner<T, LenT, S> {
fn drop(&mut self) { fn drop(&mut self) {
let mut_slice = self.as_mut_slice(); let mut_slice = self.as_mut_slice();
// We drop each element used in the vector by turning into a `&mut [T]`. // We drop each element used in the vector by turning into a `&mut [T]`.
@ -1279,7 +1326,7 @@ impl<T, S: VecStorage<T> + ?Sized> Drop for VecInner<T, S> {
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
/// Converts the given `alloc::vec::Vec<T>` into a `Vec<T, N>`. /// Converts the given `alloc::vec::Vec<T>` into a `Vec<T, N>`.
impl<T, const N: usize> TryFrom<alloc::vec::Vec<T>> for Vec<T, N> { impl<T, LenT: LenType, const N: usize> TryFrom<alloc::vec::Vec<T>> for Vec<T, N, LenT> {
type Error = CapacityError; type Error = CapacityError;
/// Converts the given `alloc::vec::Vec<T>` into a `Vec<T, N>`. /// Converts the given `alloc::vec::Vec<T>` into a `Vec<T, N>`.
@ -1288,7 +1335,7 @@ impl<T, const N: usize> TryFrom<alloc::vec::Vec<T>> for Vec<T, N> {
/// ///
/// Returns `Err` if the length of the `alloc::vec::Vec<T>` is greater than `N`. /// Returns `Err` if the length of the `alloc::vec::Vec<T>` is greater than `N`.
fn try_from(alloc_vec: alloc::vec::Vec<T>) -> Result<Self, Self::Error> { fn try_from(alloc_vec: alloc::vec::Vec<T>) -> Result<Self, Self::Error> {
let mut vec = Vec::new(); let mut vec = Self::new();
for e in alloc_vec { for e in alloc_vec {
// Push each element individually to allow handling capacity errors. // Push each element individually to allow handling capacity errors.
@ -1301,7 +1348,7 @@ impl<T, const N: usize> TryFrom<alloc::vec::Vec<T>> for Vec<T, N> {
#[cfg(feature = "alloc")] #[cfg(feature = "alloc")]
/// Converts the given `Vec<T, N>` into an `alloc::vec::Vec<T>`. /// Converts the given `Vec<T, N>` into an `alloc::vec::Vec<T>`.
impl<T, const N: usize> TryFrom<Vec<T, N>> for alloc::vec::Vec<T> { impl<T, LenT: LenType, const N: usize> TryFrom<Vec<T, N, LenT>> for alloc::vec::Vec<T> {
type Error = alloc::collections::TryReserveError; type Error = alloc::collections::TryReserveError;
/// Converts the given `Vec<T, N>` into an `alloc::vec::Vec<T>`. /// Converts the given `Vec<T, N>` into an `alloc::vec::Vec<T>`.
@ -1309,8 +1356,8 @@ impl<T, const N: usize> TryFrom<Vec<T, N>> for alloc::vec::Vec<T> {
/// # Errors /// # Errors
/// ///
/// Returns `Err` if the `alloc::vec::Vec` fails to allocate memory. /// Returns `Err` if the `alloc::vec::Vec` fails to allocate memory.
fn try_from(vec: Vec<T, N>) -> Result<Self, Self::Error> { fn try_from(vec: Vec<T, N, LenT>) -> Result<Self, Self::Error> {
let mut alloc_vec = alloc::vec::Vec::new(); let mut alloc_vec = Self::new();
// Allocate enough space for the elements, return an error if the // Allocate enough space for the elements, return an error if the
// allocation fails. // allocation fails.
@ -1324,7 +1371,7 @@ impl<T, const N: usize> TryFrom<Vec<T, N>> for alloc::vec::Vec<T> {
} }
} }
impl<'a, T: Clone, const N: usize> TryFrom<&'a [T]> for Vec<T, N> { impl<'a, T: Clone, LenT: LenType, const N: usize> TryFrom<&'a [T]> for Vec<T, N, LenT> {
type Error = CapacityError; type Error = CapacityError;
fn try_from(slice: &'a [T]) -> Result<Self, Self::Error> { fn try_from(slice: &'a [T]) -> Result<Self, Self::Error> {
@ -1332,7 +1379,7 @@ impl<'a, T: Clone, const N: usize> TryFrom<&'a [T]> for Vec<T, N> {
} }
} }
impl<T, S: VecStorage<T> + ?Sized> Extend<T> for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Extend<T> for VecInner<T, LenT, S> {
fn extend<I>(&mut self, iter: I) fn extend<I>(&mut self, iter: I)
where where
I: IntoIterator<Item = T>, I: IntoIterator<Item = T>,
@ -1341,7 +1388,7 @@ impl<T, S: VecStorage<T> + ?Sized> Extend<T> for VecInner<T, S> {
} }
} }
impl<'a, T, S: VecStorage<T> + ?Sized> Extend<&'a T> for VecInner<T, S> impl<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> Extend<&'a T> for VecInner<T, LenT, S>
where where
T: 'a + Copy, T: 'a + Copy,
{ {
@ -1353,7 +1400,7 @@ where
} }
} }
impl<T, S: VecStorage<T> + ?Sized> hash::Hash for VecInner<T, S> impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> hash::Hash for VecInner<T, LenT, S>
where where
T: core::hash::Hash, T: core::hash::Hash,
{ {
@ -1362,7 +1409,7 @@ where
} }
} }
impl<'a, T, S: VecStorage<T> + ?Sized> IntoIterator for &'a VecInner<T, S> { impl<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> IntoIterator for &'a VecInner<T, LenT, S> {
type Item = &'a T; type Item = &'a T;
type IntoIter = slice::Iter<'a, T>; type IntoIter = slice::Iter<'a, T>;
@ -1371,7 +1418,9 @@ impl<'a, T, S: VecStorage<T> + ?Sized> IntoIterator for &'a VecInner<T, S> {
} }
} }
impl<'a, T, S: VecStorage<T> + ?Sized> IntoIterator for &'a mut VecInner<T, S> { impl<'a, T, LenT: LenType, S: VecStorage<T> + ?Sized> IntoIterator
for &'a mut VecInner<T, LenT, S>
{
type Item = &'a mut T; type Item = &'a mut T;
type IntoIter = slice::IterMut<'a, T>; type IntoIter = slice::IterMut<'a, T>;
@ -1380,7 +1429,7 @@ impl<'a, T, S: VecStorage<T> + ?Sized> IntoIterator for &'a mut VecInner<T, S> {
} }
} }
impl<T, const N: usize> FromIterator<T> for Vec<T, N> { impl<T, LenT: LenType, const N: usize> FromIterator<T> for Vec<T, N, LenT> {
fn from_iter<I>(iter: I) -> Self fn from_iter<I>(iter: I) -> Self
where where
I: IntoIterator<Item = T>, I: IntoIterator<Item = T>,
@ -1396,24 +1445,24 @@ impl<T, const N: usize> FromIterator<T> for Vec<T, N> {
/// An iterator that moves out of a [`Vec`][`Vec`].
/// ///
/// This struct is created by calling the `into_iter` method on [`Vec`][`Vec`]. /// This struct is created by calling the `into_iter` method on [`Vec`][`Vec`].
pub struct IntoIter<T, const N: usize> { pub struct IntoIter<T, const N: usize, LenT: LenType> {
vec: Vec<T, N>, vec: Vec<T, N, LenT>,
next: usize, next: LenT,
} }
impl<T, const N: usize> Iterator for IntoIter<T, N> { impl<T, LenT: LenType, const N: usize> Iterator for IntoIter<T, N, LenT> {
type Item = T; type Item = T;
fn next(&mut self) -> Option<Self::Item> { fn next(&mut self) -> Option<Self::Item> {
if self.next < self.vec.len() { if self.next < self.vec.len {
let item = unsafe { let item = unsafe {
self.vec self.vec
.buffer .buffer
.buffer .buffer
.get_unchecked_mut(self.next) .get_unchecked_mut(self.next.into_usize())
.as_ptr() .as_ptr()
.read() .read()
}; };
self.next += 1; self.next += LenT::ONE;
Some(item) Some(item)
} else { } else {
None None
@ -1421,105 +1470,128 @@ impl<T, const N: usize> Iterator for IntoIter<T, N> {
} }
} }
impl<T, const N: usize> Clone for IntoIter<T, N> impl<T, LenT: LenType, const N: usize> Clone for IntoIter<T, N, LenT>
where where
T: Clone, T: Clone,
{ {
fn clone(&self) -> Self { fn clone(&self) -> Self {
let mut vec = Vec::new(); let mut vec = Vec::new();
if self.next < self.vec.len() { if self.next < self.vec.len {
let s = unsafe { let s = unsafe {
slice::from_raw_parts( slice::from_raw_parts(
self.vec.buffer.buffer.as_ptr().cast::<T>().add(self.next), self.vec
self.vec.len() - self.next, .buffer
.buffer
.as_ptr()
.cast::<T>()
.add(self.next.into_usize()),
(self.vec.len - self.next).into_usize(),
) )
}; };
vec.extend_from_slice(s).ok(); vec.extend_from_slice(s).ok();
} }
Self { vec, next: 0 } Self {
vec,
next: LenT::ZERO,
}
} }
} }
impl<T, const N: usize> Drop for IntoIter<T, N> { impl<T, LenT: LenType, const N: usize> Drop for IntoIter<T, N, LenT> {
fn drop(&mut self) { fn drop(&mut self) {
unsafe { unsafe {
// Drop all the elements that have not been moved out of vec // Drop all the elements that have not been moved out of vec
ptr::drop_in_place(&mut self.vec.as_mut_slice()[self.next..]); ptr::drop_in_place(&mut self.vec.as_mut_slice()[self.next.into_usize()..]);
// Prevent dropping of other elements // Prevent dropping of other elements
self.vec.len = 0; self.vec.len = LenT::ZERO;
} }
} }
} }
impl<T, const N: usize> IntoIterator for Vec<T, N> { impl<T, LenT: LenType, const N: usize> IntoIterator for Vec<T, N, LenT> {
type Item = T; type Item = T;
type IntoIter = IntoIter<T, N>; type IntoIter = IntoIter<T, N, LenT>;
fn into_iter(self) -> Self::IntoIter { fn into_iter(self) -> Self::IntoIter {
IntoIter { vec: self, next: 0 } IntoIter {
vec: self,
next: LenT::ZERO,
}
} }
} }
impl<A, B, SA: VecStorage<A> + ?Sized, SB: VecStorage<B> + ?Sized> PartialEq<VecInner<B, SB>> impl<A, B, LenTA, LenTB, SA, SB> PartialEq<VecInner<B, LenTB, SB>> for VecInner<A, LenTA, SA>
for VecInner<A, SA>
where where
A: PartialEq<B>, A: PartialEq<B>,
LenTA: LenType,
LenTB: LenType,
SA: VecStorage<A> + ?Sized,
SB: VecStorage<B> + ?Sized,
{ {
fn eq(&self, other: &VecInner<B, SB>) -> bool { fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
self.as_slice().eq(other.as_slice()) self.as_slice().eq(other.as_slice())
} }
} }
impl<A, B, const M: usize, SB: VecStorage<B> + ?Sized> PartialEq<VecInner<B, SB>> for [A; M] impl<A, B, LenTB, const M: usize, SB> PartialEq<VecInner<B, LenTB, SB>> for [A; M]
where where
A: PartialEq<B>, A: PartialEq<B>,
LenTB: LenType,
SB: VecStorage<B>,
{ {
fn eq(&self, other: &VecInner<B, SB>) -> bool { fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
self.eq(other.as_slice()) self.eq(other.as_slice())
} }
} }
impl<A, B, SB: VecStorage<B> + ?Sized, const M: usize> PartialEq<VecInner<B, SB>> for &[A; M] impl<A, B, LenTB, SB, const M: usize> PartialEq<VecInner<B, LenTB, SB>> for &[A; M]
where where
A: PartialEq<B>, A: PartialEq<B>,
LenTB: LenType,
SB: VecStorage<B>,
{ {
fn eq(&self, other: &VecInner<B, SB>) -> bool { fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
(*self).eq(other) (*self).eq(other)
} }
} }
impl<A, B, SB: VecStorage<B> + ?Sized> PartialEq<VecInner<B, SB>> for [A] impl<A, B, LenTB, SB> PartialEq<VecInner<B, LenTB, SB>> for [A]
where where
A: PartialEq<B>, A: PartialEq<B>,
LenTB: LenType,
SB: VecStorage<B>,
{ {
fn eq(&self, other: &VecInner<B, SB>) -> bool { fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
self.eq(other.as_slice()) self.eq(other.as_slice())
} }
} }
impl<A, B, SB: VecStorage<B> + ?Sized> PartialEq<VecInner<B, SB>> for &[A] impl<A, B, LenTB, SB> PartialEq<VecInner<B, LenTB, SB>> for &[A]
where where
A: PartialEq<B>, A: PartialEq<B>,
LenTB: LenType,
SB: VecStorage<B>,
{ {
fn eq(&self, other: &VecInner<B, SB>) -> bool { fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
(*self).eq(other) (*self).eq(other)
} }
} }
impl<A, B, SB: VecStorage<B> + ?Sized> PartialEq<VecInner<B, SB>> for &mut [A] impl<A, B, LenTB: LenType, SB: VecStorage<B>> PartialEq<VecInner<B, LenTB, SB>> for &mut [A]
where where
A: PartialEq<B>, A: PartialEq<B>,
{ {
fn eq(&self, other: &VecInner<B, SB>) -> bool { fn eq(&self, other: &VecInner<B, LenTB, SB>) -> bool {
(**self).eq(other) (**self).eq(other)
} }
} }
impl<A, B, SA: VecStorage<A> + ?Sized, const N: usize> PartialEq<[B; N]> for VecInner<A, SA> impl<A, B, LenTA: LenType, SA, const N: usize> PartialEq<[B; N]> for VecInner<A, LenTA, SA>
where where
A: PartialEq<B>, A: PartialEq<B>,
SA: VecStorage<A> + ?Sized,
{ {
#[inline] #[inline]
fn eq(&self, other: &[B; N]) -> bool { fn eq(&self, other: &[B; N]) -> bool {
@ -1527,9 +1599,11 @@ where
} }
} }
impl<A, B, SA: VecStorage<A> + ?Sized, const N: usize> PartialEq<&[B; N]> for VecInner<A, SA> impl<A, B, LenTA, SA, const N: usize> PartialEq<&[B; N]> for VecInner<A, LenTA, SA>
where where
A: PartialEq<B>, A: PartialEq<B>,
LenTA: LenType,
SA: VecStorage<A> + ?Sized,
{ {
#[inline] #[inline]
fn eq(&self, other: &&[B; N]) -> bool { fn eq(&self, other: &&[B; N]) -> bool {
@ -1537,9 +1611,11 @@ where
} }
} }
impl<A, B, SA: VecStorage<A> + ?Sized> PartialEq<[B]> for VecInner<A, SA> impl<A, B, LenTA, SA> PartialEq<[B]> for VecInner<A, LenTA, SA>
where where
A: PartialEq<B>, A: PartialEq<B>,
LenTA: LenType,
SA: VecStorage<A> + ?Sized,
{ {
#[inline] #[inline]
fn eq(&self, other: &[B]) -> bool { fn eq(&self, other: &[B]) -> bool {
@ -1547,9 +1623,11 @@ where
} }
} }
impl<A, B, SA: VecStorage<A> + ?Sized> PartialEq<&[B]> for VecInner<A, SA> impl<A, B, LenTA, SA> PartialEq<&[B]> for VecInner<A, LenTA, SA>
where where
A: PartialEq<B>, A: PartialEq<B>,
LenTA: LenType,
SA: VecStorage<A> + ?Sized,
{ {
#[inline] #[inline]
fn eq(&self, other: &&[B]) -> bool { fn eq(&self, other: &&[B]) -> bool {
@ -1557,9 +1635,11 @@ where
} }
} }
impl<A, B, SA: VecStorage<A> + ?Sized> PartialEq<&mut [B]> for VecInner<A, SA> impl<A, B, LenTA, SA> PartialEq<&mut [B]> for VecInner<A, LenTA, SA>
where where
A: PartialEq<B>, A: PartialEq<B>,
LenTA: LenType,
SA: VecStorage<A> + ?Sized,
{ {
#[inline] #[inline]
fn eq(&self, other: &&mut [B]) -> bool { fn eq(&self, other: &&mut [B]) -> bool {
@ -1568,19 +1648,19 @@ where
} }
// Implements Eq if underlying data is Eq // Implements Eq if underlying data is Eq
impl<T, S: VecStorage<T> + ?Sized> Eq for VecInner<T, S> where T: Eq {} impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Eq for VecInner<T, LenT, S> where T: Eq {}
impl<T, SA: VecStorage<T> + ?Sized, SB: VecStorage<T> + ?Sized> PartialOrd<VecInner<T, SA>> impl<T, LenTA: LenType, LenTB: LenType, SA: VecStorage<T> + ?Sized, SB: VecStorage<T> + ?Sized>
for VecInner<T, SB> PartialOrd<VecInner<T, LenTA, SA>> for VecInner<T, LenTB, SB>
where where
T: PartialOrd, T: PartialOrd,
{ {
fn partial_cmp(&self, other: &VecInner<T, SA>) -> Option<Ordering> { fn partial_cmp(&self, other: &VecInner<T, LenTA, SA>) -> Option<Ordering> {
self.as_slice().partial_cmp(other.as_slice()) self.as_slice().partial_cmp(other.as_slice())
} }
} }
impl<T, S: VecStorage<T> + ?Sized> Ord for VecInner<T, S> impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> Ord for VecInner<T, LenT, S>
where where
T: Ord, T: Ord,
{ {
@ -1590,7 +1670,7 @@ where
} }
} }
impl<T, S: VecStorage<T> + ?Sized> ops::Deref for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> ops::Deref for VecInner<T, LenT, S> {
type Target = [T]; type Target = [T];
fn deref(&self) -> &Self::Target { fn deref(&self) -> &Self::Target {
@ -1598,52 +1678,52 @@ impl<T, S: VecStorage<T> + ?Sized> ops::Deref for VecInner<T, S> {
} }
} }
impl<T, S: VecStorage<T> + ?Sized> ops::DerefMut for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> ops::DerefMut for VecInner<T, LenT, S> {
fn deref_mut(&mut self) -> &mut Self::Target { fn deref_mut(&mut self) -> &mut Self::Target {
self.as_mut_slice() self.as_mut_slice()
} }
} }
impl<T, S: VecStorage<T> + ?Sized> borrow::Borrow<[T]> for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> borrow::Borrow<[T]> for VecInner<T, LenT, S> {
fn borrow(&self) -> &[T] { fn borrow(&self) -> &[T] {
self.as_slice() self.as_slice()
} }
} }
impl<T, S: VecStorage<T> + ?Sized> borrow::BorrowMut<[T]> for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> borrow::BorrowMut<[T]> for VecInner<T, LenT, S> {
fn borrow_mut(&mut self) -> &mut [T] { fn borrow_mut(&mut self) -> &mut [T] {
self.as_mut_slice() self.as_mut_slice()
} }
} }
impl<T, S: VecStorage<T> + ?Sized> AsRef<Self> for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsRef<Self> for VecInner<T, LenT, S> {
#[inline] #[inline]
fn as_ref(&self) -> &Self { fn as_ref(&self) -> &Self {
self self
} }
} }
impl<T, S: VecStorage<T> + ?Sized> AsMut<Self> for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsMut<Self> for VecInner<T, LenT, S> {
#[inline] #[inline]
fn as_mut(&mut self) -> &mut Self { fn as_mut(&mut self) -> &mut Self {
self self
} }
} }
impl<T, S: VecStorage<T> + ?Sized> AsRef<[T]> for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsRef<[T]> for VecInner<T, LenT, S> {
#[inline] #[inline]
fn as_ref(&self) -> &[T] { fn as_ref(&self) -> &[T] {
self self
} }
} }
impl<T, S: VecStorage<T> + ?Sized> AsMut<[T]> for VecInner<T, S> { impl<T, LenT: LenType, S: VecStorage<T> + ?Sized> AsMut<[T]> for VecInner<T, LenT, S> {
#[inline] #[inline]
fn as_mut(&mut self) -> &mut [T] { fn as_mut(&mut self) -> &mut [T] {
self self
} }
} }
impl<T, const N: usize> Clone for Vec<T, N> impl<T, const N: usize, LenT: LenType> Clone for Vec<T, N, LenT>
where where
T: Clone, T: Clone,
{ {
@ -1726,7 +1806,7 @@ mod tests {
{ {
let v: Vec<Droppable, 2> = Vec::new(); let v: Vec<Droppable, 2> = Vec::new();
let v: Box<Vec<Droppable, 2>> = Box::new(v); let v: Box<Vec<Droppable, 2>> = Box::new(v);
let mut v: Box<VecView<Droppable>> = v; let mut v: Box<VecView<Droppable, u8>> = v;
v.push(Droppable::new()).ok().unwrap(); v.push(Droppable::new()).ok().unwrap();
v.push(Droppable::new()).ok().unwrap(); v.push(Droppable::new()).ok().unwrap();
assert_eq!(Droppable::count(), 2); assert_eq!(Droppable::count(), 2);
@ -1739,7 +1819,7 @@ mod tests {
{ {
let v: Vec<Droppable, 2> = Vec::new(); let v: Vec<Droppable, 2> = Vec::new();
let v: Box<Vec<Droppable, 2>> = Box::new(v); let v: Box<Vec<Droppable, 2>> = Box::new(v);
let mut v: Box<VecView<Droppable>> = v; let mut v: Box<VecView<Droppable, u8>> = v;
v.push(Droppable::new()).ok().unwrap(); v.push(Droppable::new()).ok().unwrap();
v.push(Droppable::new()).ok().unwrap(); v.push(Droppable::new()).ok().unwrap();
assert_eq!(Droppable::count(), 2); assert_eq!(Droppable::count(), 2);