Initial conversion to const generics

vec passes tests

vec passes tests with docs as well (besides one with FromIter)

vec passes tests with docs as well (besides one with FromIter)

exposing full API

passing all current tests

starting with string

string test passes mostly

string test passes

ufmt passes

TODO removed

binary_heap wip

binary_heap passes tests

sealed passes

spsc wip

spsc wip2

split wip3

spcs and split passes --lib tests

spcs and split passes --lib tests

spcs and split passes --lib tests

spcs and split passes all tests (doc + lib)

indexmap wip

indexmap passes --lib test

indexmap passes all tests (lib + doc)

indexset passes all tests (lib + doc)

indexset passes all tests (lib + doc)

linear map wip

linear map all test (lib + doc) passes, drop not tested, into_iter(mut self) not implemented

history buffer all test pass (doc + lib), Copy instead of clone atm

serde does not work

pool works, serde still not

serde wip

serde wip

serde wip

serde wip
This commit is contained in:
Per Lindgren 2020-08-20 16:00:55 +02:00 committed by Emil Fresk
parent 6ba462eb93
commit f962e3a45f
17 changed files with 1185 additions and 1474 deletions

View File

@ -33,7 +33,7 @@ scoped_threadpool = "0.1.8"
[dependencies]
as-slice = "0.1.0"
generic-array = "0.13.0"
# generic-array = "0.13.0"
hash32 = "0.1.0"
[dependencies.serde]

View File

@ -16,9 +16,8 @@ use core::{
ptr, slice,
};
use generic_array::{ArrayLength, GenericArray};
use crate::sealed::binary_heap::Kind;
use crate::vec::Vec;
/// Min-heap
pub enum Min {}
@ -26,17 +25,6 @@ pub enum Min {}
/// Max-heap
pub enum Max {}
impl<A, K> crate::i::BinaryHeap<A, K> {
/// `BinaryHeap` `const` constructor; wrap the returned value in
/// [`BinaryHeap`](../struct.BinaryHeap.html)
pub const fn new() -> Self {
Self {
_kind: PhantomData,
data: crate::i::Vec::new(),
}
}
}
/// A priority queue implemented with a binary heap.
///
/// This can be either a min-heap or a max-heap.
@ -47,9 +35,8 @@ impl<A, K> crate::i::BinaryHeap<A, K> {
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
///
/// // We can use peek to look at the next item in the heap. In this case,
/// // there's no items in there yet so we get None.
@ -84,18 +71,19 @@ impl<A, K> crate::i::BinaryHeap<A, K> {
/// // The heap should now be empty.
/// assert!(heap.is_empty())
/// ```
pub struct BinaryHeap<T, N, KIND>(
#[doc(hidden)] pub crate::i::BinaryHeap<GenericArray<T, N>, KIND>,
)
where
T: Ord,
N: ArrayLength<T>,
KIND: Kind;
impl<T, N, K> BinaryHeap<T, N, K>
pub struct BinaryHeap<T, K, const N: usize>
where
T: Ord,
K: Kind,
{
pub(crate) _kind: PhantomData<K>,
pub(crate) data: Vec<T, N>,
}
impl<T, K, const N: usize> BinaryHeap<T, K, N>
where
T: Ord,
N: ArrayLength<T>,
K: Kind,
{
/* Constructors */
@ -103,32 +91,33 @@ where
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// // allocate the binary heap on the stack
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
/// heap.push(4).unwrap();
///
/// // allocate the binary heap in a static variable
/// static mut HEAP: BinaryHeap<i32, U8, Max> = BinaryHeap(heapless::i::BinaryHeap::new());
/// static mut HEAP: BinaryHeap<i32, Max, 8> = BinaryHeap::new();
/// ```
pub fn new() -> Self {
BinaryHeap(crate::i::BinaryHeap::new())
pub const fn new() -> Self {
Self {
_kind: PhantomData,
data: Vec::new(),
}
}
/* Public API */
/// Returns the capacity of the binary heap.
pub fn capacity(&self) -> usize {
self.0.data.capacity()
self.data.capacity()
}
/// Drops all items from the binary heap.
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
/// heap.push(1).unwrap();
/// heap.push(3).unwrap();
///
@ -139,32 +128,30 @@ where
/// assert!(heap.is_empty());
/// ```
pub fn clear(&mut self) {
self.0.data.clear()
self.data.clear()
}
/// Returns the length of the binary heap.
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
/// heap.push(1).unwrap();
/// heap.push(3).unwrap();
///
/// assert_eq!(heap.len(), 2);
/// ```
pub fn len(&self) -> usize {
self.0.data.len
self.data.len()
}
/// Checks if the binary heap is empty.
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
///
/// assert!(heap.is_empty());
///
@ -182,9 +169,8 @@ where
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
/// heap.push(1).unwrap();
/// heap.push(2).unwrap();
/// heap.push(3).unwrap();
@ -197,7 +183,7 @@ where
/// }
/// ```
pub fn iter(&self) -> slice::Iter<'_, T> {
self.0.data.as_slice().iter()
self.data.as_slice().iter()
}
/// Returns a mutable iterator visiting all values in the underlying vector, in arbitrary order.
@ -205,7 +191,7 @@ where
/// **WARNING** Mutating the items in the binary heap can leave the heap in an inconsistent
/// state.
pub fn iter_mut(&mut self) -> slice::IterMut<'_, T> {
self.0.data.as_mut_slice().iter_mut()
self.data.as_mut_slice().iter_mut()
}
/// Returns the *top* (greatest if max-heap, smallest if min-heap) item in the binary heap, or
@ -213,9 +199,8 @@ where
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
/// assert_eq!(heap.peek(), None);
///
/// heap.push(1).unwrap();
@ -224,7 +209,7 @@ where
/// assert_eq!(heap.peek(), Some(&5));
/// ```
pub fn peek(&self) -> Option<&T> {
self.0.data.as_slice().get(0)
self.data.as_slice().get(0)
}
/// Returns a mutable reference to the greatest item in the binary heap, or
@ -239,9 +224,8 @@ where
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
/// assert!(heap.peek_mut().is_none());
///
/// heap.push(1);
@ -254,7 +238,7 @@ where
///
/// assert_eq!(heap.peek(), Some(&2));
/// ```
pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, N, K>> {
pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T, K, N>> {
if self.is_empty() {
None
} else {
@ -270,9 +254,8 @@ where
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
/// heap.push(1).unwrap();
/// heap.push(3).unwrap();
///
@ -291,10 +274,10 @@ where
/// Removes the *top* (greatest if max-heap, smallest if min-heap) item from the binary heap and
/// returns it, without checking if the binary heap is empty.
pub unsafe fn pop_unchecked(&mut self) -> T {
let mut item = self.0.data.pop_unchecked();
let mut item = self.data.pop_unchecked();
if !self.is_empty() {
mem::swap(&mut item, self.0.data.as_mut_slice().get_unchecked_mut(0));
mem::swap(&mut item, self.data.as_mut_slice().get_unchecked_mut(0));
self.sift_down_to_bottom(0);
}
item
@ -304,9 +287,8 @@ where
///
/// ```
/// use heapless::binary_heap::{BinaryHeap, Max};
/// use heapless::consts::*;
///
/// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
/// let mut heap: BinaryHeap<_, Max, 8> = BinaryHeap::new();
/// heap.push(3).unwrap();
/// heap.push(5).unwrap();
/// heap.push(1).unwrap();
@ -315,7 +297,7 @@ where
/// assert_eq!(heap.peek(), Some(&5));
/// ```
pub fn push(&mut self, item: T) -> Result<(), T> {
if self.0.data.is_full() {
if self.data.is_full() {
return Err(item);
}
@ -326,7 +308,7 @@ where
/// Pushes an item onto the binary heap without first checking if it's full.
pub unsafe fn push_unchecked(&mut self, item: T) {
let old_len = self.len();
self.0.data.push_unchecked(item);
self.data.push_unchecked(item);
self.sift_up(0, old_len);
}
@ -335,7 +317,7 @@ where
let end = self.len();
let start = pos;
unsafe {
let mut hole = Hole::new(self.0.data.as_mut_slice(), pos);
let mut hole = Hole::new(self.data.as_mut_slice(), pos);
let mut child = 2 * pos + 1;
while child < end {
let right = child + 1;
@ -354,7 +336,7 @@ where
fn sift_up(&mut self, start: usize, pos: usize) -> usize {
unsafe {
// Take out the value at `pos` and create a hole.
let mut hole = Hole::new(self.0.data.as_mut_slice(), pos);
let mut hole = Hole::new(self.data.as_mut_slice(), pos);
while hole.pos() > start {
let parent = (hole.pos() - 1) / 2;
@ -437,20 +419,18 @@ impl<'a, T> Hole<'a, T> {
///
/// [`peek_mut`]: struct.BinaryHeap.html#method.peek_mut
/// [`BinaryHeap`]: struct.BinaryHeap.html
pub struct PeekMut<'a, T, N, K>
pub struct PeekMut<'a, T, K, const N: usize>
where
T: Ord,
N: ArrayLength<T>,
K: Kind,
{
heap: &'a mut BinaryHeap<T, N, K>,
heap: &'a mut BinaryHeap<T, K, N>,
sift: bool,
}
impl<T, N, K> Drop for PeekMut<'_, T, N, K>
impl<T, K, const N: usize> Drop for PeekMut<'_, T, K, N>
where
T: Ord,
N: ArrayLength<T>,
K: Kind,
{
fn drop(&mut self) {
@ -460,41 +440,38 @@ where
}
}
impl<T, N, K> Deref for PeekMut<'_, T, N, K>
impl<T, K, const N: usize> Deref for PeekMut<'_, T, K, N>
where
T: Ord,
N: ArrayLength<T>,
K: Kind,
{
type Target = T;
fn deref(&self) -> &T {
debug_assert!(!self.heap.is_empty());
// SAFE: PeekMut is only instantiated for non-empty heaps
unsafe { self.heap.0.data.as_slice().get_unchecked(0) }
unsafe { self.heap.data.as_slice().get_unchecked(0) }
}
}
impl<T, N, K> DerefMut for PeekMut<'_, T, N, K>
impl<T, K, const N: usize> DerefMut for PeekMut<'_, T, K, N>
where
T: Ord,
N: ArrayLength<T>,
K: Kind,
{
fn deref_mut(&mut self) -> &mut T {
debug_assert!(!self.heap.is_empty());
// SAFE: PeekMut is only instantiated for non-empty heaps
unsafe { self.heap.0.data.as_mut_slice().get_unchecked_mut(0) }
unsafe { self.heap.data.as_mut_slice().get_unchecked_mut(0) }
}
}
impl<'a, T, N, K> PeekMut<'a, T, N, K>
impl<'a, T, K, const N: usize> PeekMut<'a, T, K, N>
where
T: Ord,
N: ArrayLength<T>,
K: Kind,
{
/// Removes the peeked value from the heap and returns it.
pub fn pop(mut this: PeekMut<'a, T, N, K>) -> T {
pub fn pop(mut this: PeekMut<'a, T, K, N>) -> T {
let value = this.heap.pop().unwrap();
this.sift = false;
value
@ -512,10 +489,9 @@ impl<'a, T> Drop for Hole<'a, T> {
}
}
impl<T, N, K> Default for BinaryHeap<T, N, K>
impl<T, K, const N: usize> Default for BinaryHeap<T, K, N>
where
T: Ord,
N: ArrayLength<T>,
K: Kind,
{
fn default() -> Self {
@ -523,34 +499,31 @@ where
}
}
impl<T, N, K> Clone for BinaryHeap<T, N, K>
impl<T, K, const N: usize> Clone for BinaryHeap<T, K, N>
where
N: ArrayLength<T>,
K: Kind,
T: Ord + Clone,
{
fn clone(&self) -> Self {
BinaryHeap(crate::i::BinaryHeap {
_kind: self.0._kind,
data: self.0.data.clone(),
})
Self {
_kind: self._kind,
data: self.data.clone(),
}
}
}
impl<T, N, K> Drop for BinaryHeap<T, N, K>
impl<T, K, const N: usize> Drop for BinaryHeap<T, K, N>
where
N: ArrayLength<T>,
K: Kind,
T: Ord,
{
fn drop(&mut self) {
unsafe { ptr::drop_in_place(self.0.data.as_mut_slice()) }
unsafe { ptr::drop_in_place(self.data.as_mut_slice()) }
}
}
impl<T, N, K> fmt::Debug for BinaryHeap<T, N, K>
impl<T, K, const N: usize> fmt::Debug for BinaryHeap<T, K, N>
where
N: ArrayLength<T>,
K: Kind,
T: Ord + fmt::Debug,
{
@ -559,9 +532,8 @@ where
}
}
impl<'a, T, N, K> IntoIterator for &'a BinaryHeap<T, N, K>
impl<'a, T, K, const N: usize> IntoIterator for &'a BinaryHeap<T, K, N>
where
N: ArrayLength<T>,
K: Kind,
T: Ord,
{
@ -577,19 +549,16 @@ where
mod tests {
use std::vec::Vec;
use crate::{
binary_heap::{self, BinaryHeap, Min},
consts::*,
};
use crate::binary_heap::{BinaryHeap, Max, Min};
#[test]
fn static_new() {
static mut _B: BinaryHeap<i32, U16, Min> = BinaryHeap(crate::i::BinaryHeap::new());
static mut _B: BinaryHeap<i32, Min, 16> = BinaryHeap::new();
}
#[test]
fn min() {
let mut heap = BinaryHeap::<_, U16, Min>::new();
let mut heap = BinaryHeap::<_, Min, 16>::new();
heap.push(1).unwrap();
heap.push(2).unwrap();
heap.push(3).unwrap();
@ -641,7 +610,7 @@ mod tests {
#[test]
fn max() {
let mut heap = BinaryHeap::<_, U16, binary_heap::Max>::new();
let mut heap = BinaryHeap::<_, Max, 16>::new();
heap.push(1).unwrap();
heap.push(2).unwrap();
heap.push(3).unwrap();

View File

@ -1,6 +1,8 @@
//! missing doc
use core::{fmt, marker::PhantomData};
use generic_array::{typenum::PowerOfTwo, ArrayLength};
// use generic_array::{typenum::PowerOfTwo, ArrayLength};
use hash32::{BuildHasherDefault, Hash, Hasher};
use serde::de::{self, Deserialize, Deserializer, Error, MapAccess, SeqAccess};
@ -12,25 +14,24 @@ use crate::{
// Sequential containers
impl<'de, T, N, KIND> Deserialize<'de> for BinaryHeap<T, N, KIND>
impl<'de, T, KIND, const N: usize> Deserialize<'de> for BinaryHeap<T, KIND, N>
where
T: Ord + Deserialize<'de>,
N: ArrayLength<T>,
KIND: BinaryHeapKind,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ValueVisitor<'de, T, N, KIND>(PhantomData<(&'de (), T, N, KIND)>);
struct ValueVisitor<'de, T, KIND, const N: usize>(PhantomData<(&'de (), T, KIND)>);
impl<'de, T, N, KIND> de::Visitor<'de> for ValueVisitor<'de, T, N, KIND>
impl<'de, T, KIND, const N: usize> de::Visitor<'de> for ValueVisitor<'de, T, KIND, N>
where
T: Ord + Deserialize<'de>,
N: ArrayLength<T>,
KIND: BinaryHeapKind,
{
type Value = BinaryHeap<T, N, KIND>;
type Value = BinaryHeap<T, KIND, N>;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a sequence")
@ -55,25 +56,23 @@ where
}
}
impl<'de, T, N, S> Deserialize<'de> for IndexSet<T, N, BuildHasherDefault<S>>
impl<'de, T, S, const N: usize> Deserialize<'de> for IndexSet<T, BuildHasherDefault<S>, N>
where
T: Eq + Hash + Deserialize<'de>,
S: Hasher + Default,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>> + PowerOfTwo,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ValueVisitor<'de, T, N, S>(PhantomData<(&'de (), T, N, S)>);
struct ValueVisitor<'de, T, S, const N: usize>(PhantomData<(&'de (), T, S)>);
impl<'de, T, N, S> de::Visitor<'de> for ValueVisitor<'de, T, N, S>
impl<'de, T, S, const N: usize> de::Visitor<'de> for ValueVisitor<'de, T, S, N>
where
T: Eq + Hash + Deserialize<'de>,
S: Hasher + Default,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>> + PowerOfTwo,
{
type Value = IndexSet<T, N, BuildHasherDefault<S>>;
type Value = IndexSet<T, BuildHasherDefault<S>, N>;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a sequence")
@ -98,20 +97,18 @@ where
}
}
impl<'de, T, N> Deserialize<'de> for Vec<T, N>
impl<'de, T, const N: usize> Deserialize<'de> for Vec<T, N>
where
N: ArrayLength<T>,
T: Deserialize<'de>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ValueVisitor<'de, T, N>(PhantomData<(&'de (), T, N)>);
struct ValueVisitor<'de, T, const N: usize>(PhantomData<(&'de (), T)>);
impl<'de, T, N> de::Visitor<'de> for ValueVisitor<'de, T, N>
impl<'de, T, const N: usize> serde::de::Visitor<'de> for ValueVisitor<'de, T, N>
where
N: ArrayLength<T>,
T: Deserialize<'de>,
{
type Value = Vec<T, N>;
@ -141,27 +138,25 @@ where
// Dictionaries
impl<'de, K, V, N, S> Deserialize<'de> for IndexMap<K, V, N, BuildHasherDefault<S>>
impl<'de, K, V, S, const N: usize> Deserialize<'de> for IndexMap<K, V, BuildHasherDefault<S>, N>
where
K: Eq + Hash + Deserialize<'de>,
V: Deserialize<'de>,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>> + PowerOfTwo,
S: Default + Hasher,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ValueVisitor<'de, K, V, N, S>(PhantomData<(&'de (), K, V, N, S)>);
struct ValueVisitor<'de, K, V, S, const N:usize>(PhantomData<(&'de (), K, V, S)>);
impl<'de, K, V, N, S> de::Visitor<'de> for ValueVisitor<'de, K, V, N, S>
impl<'de, K, V, S, const N:usize> de::Visitor<'de> for ValueVisitor<'de, K, V, S, N>
where
K: Eq + Hash + Deserialize<'de>,
V: Deserialize<'de>,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>> + PowerOfTwo,
S: Default + Hasher,
{
type Value = IndexMap<K, V, N, BuildHasherDefault<S>>;
type Value = IndexMap<K, V, BuildHasherDefault<S>, N>;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("a map")
@ -186,23 +181,21 @@ where
}
}
impl<'de, K, V, N> Deserialize<'de> for LinearMap<K, V, N>
impl<'de, K, V, const N:usize> Deserialize<'de> for LinearMap<K, V, N>
where
K: Eq + Deserialize<'de>,
V: Deserialize<'de>,
N: ArrayLength<(K, V)>,
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ValueVisitor<'de, K, V, N>(PhantomData<(&'de (), K, V, N)>);
struct ValueVisitor<'de, K, V, const N:usize>(PhantomData<(&'de (), K, V)>);
impl<'de, K, V, N> de::Visitor<'de> for ValueVisitor<'de, K, V, N>
impl<'de, K, V, const N:usize> de::Visitor<'de> for ValueVisitor<'de, K, V, N>
where
K: Eq + Deserialize<'de>,
V: Deserialize<'de>,
N: ArrayLength<(K, V)>,
{
type Value = LinearMap<K, V, N>;
@ -231,19 +224,15 @@ where
// String containers
impl<'de, N> Deserialize<'de> for String<N>
where
N: ArrayLength<u8>,
impl<'de, const N:usize> Deserialize<'de> for String<N>
{
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
struct ValueVisitor<'de, N>(PhantomData<(&'de (), N)>);
struct ValueVisitor<'de, const N:usize>(PhantomData<(&'de ())>);
impl<'de, N> de::Visitor<'de> for ValueVisitor<'de, N>
where
N: ArrayLength<u8>,
impl<'de, const N:usize > de::Visitor<'de> for ValueVisitor<'de, N>
{
type Value = String<N>;
@ -251,7 +240,7 @@ where
write!(
formatter,
"a string no more than {} bytes long",
N::to_u64()
N as u64
)
}

View File

@ -1,4 +1,4 @@
use generic_array::{ArrayLength, GenericArray, sequence::GenericSequence};
// use generic_array::{sequence::GenericSequence, ArrayLength, GenericArray};
/// A "history buffer", similar to a write-only ring buffer of fixed length.
///
@ -14,10 +14,9 @@ use generic_array::{ArrayLength, GenericArray, sequence::GenericSequence};
/// # Examples
/// ```
/// use heapless::HistoryBuffer;
/// use heapless::consts::*;
///
/// // Initialize a new buffer with 8 elements, all initially zero.
/// let mut buf = HistoryBuffer::<_, U8>::new();
/// let mut buf = HistoryBuffer::<_, 8>::new();
///
/// buf.write(3);
/// buf.write(5);
@ -34,19 +33,14 @@ use generic_array::{ArrayLength, GenericArray, sequence::GenericSequence};
/// assert_eq!(avg, 2);
/// ```
#[derive(Clone)]
pub struct HistoryBuffer<T, N>
where
N: ArrayLength<T>,
{
data: GenericArray<T, N>,
pub struct HistoryBuffer<T, const N: usize> {
data: [T; N],
write_at: usize,
}
impl<T, N> HistoryBuffer<T, N>
impl<T, const N: usize> HistoryBuffer<T, N>
where
N: ArrayLength<T>,
T: Default,
T: Default + Copy,
{
/// Constructs a new history buffer, where every element is filled with the
/// default value of the type `T`.
@ -57,16 +51,17 @@ where
///
/// ```
/// use heapless::HistoryBuffer;
/// use heapless::consts::*;
///
/// // Allocate a 16-element buffer on the stack
/// let mut x: HistoryBuffer<u8, U16> = HistoryBuffer::new();
/// let mut x: HistoryBuffer<u8, 16> = HistoryBuffer::new();
/// // All elements are zero
/// assert_eq!(x.as_slice(), [0; 16]);
/// ```
pub fn new() -> Self {
Self {
data: Default::default(),
// seems not yet implemented
// data: Default::default(),
data: [T::default(); N],
write_at: 0,
}
}
@ -78,10 +73,9 @@ where
}
}
impl<T, N> HistoryBuffer<T, N>
impl<T, const N: usize> HistoryBuffer<T, N>
where
N: ArrayLength<T>,
T: Clone,
T: Copy + Clone,
{
/// Constructs a new history buffer, where every element is the given value.
///
@ -89,16 +83,15 @@ where
///
/// ```
/// use heapless::HistoryBuffer;
/// use heapless::consts::*;
///
/// // Allocate a 16-element buffer on the stack
/// let mut x: HistoryBuffer<u8, U16> = HistoryBuffer::new_with(4);
/// let mut x: HistoryBuffer<u8, 16> = HistoryBuffer::new_with(4);
/// // All elements are four
/// assert_eq!(x.as_slice(), [4; 16]);
/// ```
pub fn new_with(t: T) -> Self {
Self {
data: GenericArray::generate(|_| t.clone()),
data: [t; N],
write_at: 0,
}
}
@ -109,10 +102,7 @@ where
}
}
impl<T, N> HistoryBuffer<T, N>
where
N: ArrayLength<T>,
{
impl<T, const N: usize> HistoryBuffer<T, N> {
/// Returns the capacity of the buffer, which is the length of the
/// underlying backing array.
pub fn len(&self) -> usize {
@ -147,9 +137,8 @@ where
///
/// ```
/// use heapless::HistoryBuffer;
/// use heapless::consts::*;
///
/// let mut x: HistoryBuffer<u8, U16> = HistoryBuffer::new();
/// let mut x: HistoryBuffer<u8, 16> = HistoryBuffer::new();
/// x.write(4);
/// x.write(10);
/// assert_eq!(x.recent(), &10);
@ -169,10 +158,7 @@ where
}
}
impl<T, N> Extend<T> for HistoryBuffer<T, N>
where
N: ArrayLength<T>,
{
impl<T, const N: usize> Extend<T> for HistoryBuffer<T, N> {
fn extend<I>(&mut self, iter: I)
where
I: IntoIterator<Item = T>,
@ -183,10 +169,9 @@ where
}
}
impl<'a, T, N> Extend<&'a T> for HistoryBuffer<T, N>
impl<'a, T, const N: usize> Extend<&'a T> for HistoryBuffer<T, N>
where
T: 'a + Clone,
N: ArrayLength<T>,
{
fn extend<I>(&mut self, iter: I)
where
@ -198,21 +183,21 @@ where
#[cfg(test)]
mod tests {
use crate::{consts::*, HistoryBuffer};
use crate::HistoryBuffer;
#[test]
fn new() {
let x: HistoryBuffer<u8, U4> = HistoryBuffer::new_with(1);
let x: HistoryBuffer<u8, 4> = HistoryBuffer::new_with(1);
assert_eq!(x.len(), 4);
assert_eq!(x.as_slice(), [1; 4]);
let x: HistoryBuffer<u8, U4> = HistoryBuffer::new();
let x: HistoryBuffer<u8, 4> = HistoryBuffer::new();
assert_eq!(x.as_slice(), [0; 4]);
}
#[test]
fn write() {
let mut x: HistoryBuffer<u8, U4> = HistoryBuffer::new();
let mut x: HistoryBuffer<u8, 4> = HistoryBuffer::new();
x.write(1);
x.write(4);
assert_eq!(x.as_slice(), [1, 4, 0, 0]);
@ -228,18 +213,18 @@ mod tests {
#[test]
fn clear() {
let mut x: HistoryBuffer<u8, U4> = HistoryBuffer::new_with(1);
let mut x: HistoryBuffer<u8, 4> = HistoryBuffer::new_with(1);
x.clear();
assert_eq!(x.as_slice(), [0; 4]);
let mut x: HistoryBuffer<u8, U4> = HistoryBuffer::new();
let mut x: HistoryBuffer<u8, 4> = HistoryBuffer::new();
x.clear_with(1);
assert_eq!(x.as_slice(), [1; 4]);
}
#[test]
fn recent() {
let mut x: HistoryBuffer<u8, U4> = HistoryBuffer::new();
let mut x: HistoryBuffer<u8, 4> = HistoryBuffer::new();
assert_eq!(x.recent(), &0);
x.write(1);
@ -254,7 +239,7 @@ mod tests {
#[test]
fn as_slice() {
let mut x: HistoryBuffer<u8, U4> = HistoryBuffer::new();
let mut x: HistoryBuffer<u8, 4> = HistoryBuffer::new();
x.extend([1, 2, 3, 4, 5].iter());

View File

@ -7,13 +7,13 @@ use core::{
ops, slice,
};
use generic_array::{typenum::PowerOfTwo, ArrayLength, GenericArray};
// use generic_array::{typenum::PowerOfTwo, ArrayLength, GenericArray};
use hash32::{BuildHasher, BuildHasherDefault, FnvHasher, Hash, Hasher};
use crate::Vec;
/// An `IndexMap` using the default FNV hasher
pub type FnvIndexMap<K, V, N> = IndexMap<K, V, N, BuildHasherDefault<FnvHasher>>;
pub type FnvIndexMap<K, V, const N: usize> = IndexMap<K, V, BuildHasherDefault<FnvHasher>, N>;
#[derive(Clone, Copy, Eq, PartialEq)]
struct HashValue(u16);
@ -85,19 +85,17 @@ macro_rules! probe_loop {
}
}
struct CoreMap<K, V, N>
struct CoreMap<K, V, const N: usize>
where
K: Eq + Hash,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
entries: Vec<Bucket<K, V>, N>,
indices: GenericArray<Option<Pos>, N>,
indices: [Option<Pos>; N],
}
impl<K, V, N> CoreMap<K, V, N>
impl<K, V, const N: usize> CoreMap<K, V, N>
where
K: Eq + Hash,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
// TODO turn into a `const fn`; needs `mem::zeroed` to be a `const fn`
fn new() -> Self {
@ -108,7 +106,7 @@ where
}
fn capacity() -> usize {
N::to_usize()
N
}
fn mask() -> usize {
@ -270,11 +268,10 @@ where
}
}
impl<K, V, N> Clone for CoreMap<K, V, N>
impl<K, V, const N: usize> Clone for CoreMap<K, V, N>
where
K: Eq + Hash + Clone,
V: Clone,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
fn clone(&self) -> Self {
Self {
@ -292,10 +289,9 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// // A hash map with a capacity of 16 key-value pairs allocated on the stack
/// let mut book_reviews = FnvIndexMap::<_, _, U16>::new();
/// let mut book_reviews = FnvIndexMap::<_, _, 16>::new();
///
/// // review some books.
/// book_reviews.insert("Adventures of Huckleberry Finn", "My favorite book.").unwrap();
@ -326,20 +322,18 @@ where
/// println!("{}: \"{}\"", book, review);
/// }
/// ```
pub struct IndexMap<K, V, N, S>
pub struct IndexMap<K, V, S, const N: usize>
where
K: Eq + Hash,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
core: CoreMap<K, V, N>,
build_hasher: S,
}
impl<K, V, N, S> IndexMap<K, V, N, BuildHasherDefault<S>>
impl<K, V, S, const N: usize> IndexMap<K, V, BuildHasherDefault<S>, N>
where
K: Eq + Hash,
S: Default + Hasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>> + PowerOfTwo,
{
// TODO turn into a `const fn`; needs `mem::zeroed` to be a `const fn`
/// Creates an empty `IndexMap`.
@ -353,25 +347,23 @@ where
}
}
impl<K, V, N, S> IndexMap<K, V, N, S>
impl<K, V, S, const N: usize> IndexMap<K, V, S, N>
where
K: Eq + Hash,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
/* Public API */
/// Returns the number of elements the map can hold
pub fn capacity(&self) -> usize {
N::to_usize()
N
}
/// Return an iterator over the keys of the map, in their order
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U16>::new();
/// let mut map = FnvIndexMap::<_, _, 16>::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -388,9 +380,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U16>::new();
/// let mut map = FnvIndexMap::<_, _, 16>::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -407,9 +398,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U16>::new();
/// let mut map = FnvIndexMap::<_, _, 16>::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -430,9 +420,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U16>::new();
/// let mut map = FnvIndexMap::<_, _, 16>::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -451,9 +440,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U16>::new();
/// let mut map = FnvIndexMap::<_, _, 16>::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -481,9 +469,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut a = FnvIndexMap::<_, _, U16>::new();
/// let mut a = FnvIndexMap::<_, _, 16>::new();
/// assert_eq!(a.len(), 0);
/// a.insert(1, "a").unwrap();
/// assert_eq!(a.len(), 1);
@ -498,9 +485,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut a = FnvIndexMap::<_, _, U16>::new();
/// let mut a = FnvIndexMap::<_, _, 16>::new();
/// assert!(a.is_empty());
/// a.insert(1, "a");
/// assert!(!a.is_empty());
@ -515,9 +501,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut a = FnvIndexMap::<_, _, U16>::new();
/// let mut a = FnvIndexMap::<_, _, 16>::new();
/// a.insert(1, "a");
/// a.clear();
/// assert!(a.is_empty());
@ -538,9 +523,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U16>::new();
/// let mut map = FnvIndexMap::<_, _, 16>::new();
/// map.insert(1, "a").unwrap();
/// assert_eq!(map.get(&1), Some(&"a"));
/// assert_eq!(map.get(&2), None);
@ -565,9 +549,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U8>::new();
/// let mut map = FnvIndexMap::<_, _, 8>::new();
/// map.insert(1, "a").unwrap();
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
@ -591,9 +574,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U8>::new();
/// let mut map = FnvIndexMap::<_, _, 8>::new();
/// map.insert(1, "a").unwrap();
/// if let Some(x) = map.get_mut(&1) {
/// *x = "b";
@ -630,9 +612,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U8>::new();
/// let mut map = FnvIndexMap::<_, _, 8>::new();
/// assert_eq!(map.insert(37, "a"), Ok(None));
/// assert_eq!(map.is_empty(), false);
///
@ -663,9 +644,8 @@ where
///
/// ```
/// use heapless::FnvIndexMap;
/// use heapless::consts::*;
///
/// let mut map = FnvIndexMap::<_, _, U8>::new();
/// let mut map = FnvIndexMap::<_, _, 8>::new();
/// map.insert(1, "a").unwrap();
/// assert_eq!(map.remove(&1), Some("a"));
/// assert_eq!(map.remove(&1), None);
@ -715,12 +695,12 @@ where
}
}
impl<'a, K, Q, V, N, S> ops::Index<&'a Q> for IndexMap<K, V, N, S>
impl<'a, K, Q, V, S, const N: usize> ops::Index<&'a Q> for IndexMap<K, V, S, N>
where
K: Eq + Hash + Borrow<Q>,
Q: ?Sized + Eq + Hash,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
// N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
type Output = V;
@ -729,24 +709,24 @@ where
}
}
impl<'a, K, Q, V, N, S> ops::IndexMut<&'a Q> for IndexMap<K, V, N, S>
impl<'a, K, Q, V, S, const N: usize> ops::IndexMut<&'a Q> for IndexMap<K, V, S, N>
where
K: Eq + Hash + Borrow<Q>,
Q: ?Sized + Eq + Hash,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
// N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
fn index_mut(&mut self, key: &Q) -> &mut V {
self.get_mut(key).expect("key not found")
}
}
impl<K, V, N, S> Clone for IndexMap<K, V, N, S>
impl<K, V, S, const N: usize> Clone for IndexMap<K, V, S, N>
where
K: Eq + Hash + Clone,
V: Clone,
S: Clone,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
// N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
fn clone(&self) -> Self {
Self {
@ -756,23 +736,23 @@ where
}
}
impl<K, V, N, S> fmt::Debug for IndexMap<K, V, N, S>
impl<K, V, S, const N: usize> fmt::Debug for IndexMap<K, V, S, N>
where
K: Eq + Hash + fmt::Debug,
V: fmt::Debug,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
// N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_map().entries(self.iter()).finish()
}
}
impl<K, V, N, S> Default for IndexMap<K, V, N, S>
impl<K, V, S, const N: usize> Default for IndexMap<K, V, S, N>
where
K: Eq + Hash,
S: BuildHasher + Default,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
// N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
fn default() -> Self {
IndexMap {
@ -782,16 +762,15 @@ where
}
}
impl<K, V, N, S, N2, S2> PartialEq<IndexMap<K, V, N2, S2>> for IndexMap<K, V, N, S>
impl<K, V, S, S2, const N: usize, const N2: usize> PartialEq<IndexMap<K, V, S2, N2>>
for IndexMap<K, V, S, N>
where
K: Eq + Hash,
V: Eq,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
S2: BuildHasher,
N2: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
fn eq(&self, other: &IndexMap<K, V, N2, S2>) -> bool {
fn eq(&self, other: &IndexMap<K, V, S2, N2>) -> bool {
self.len() == other.len()
&& self
.iter()
@ -799,20 +778,18 @@ where
}
}
impl<K, V, N, S> Eq for IndexMap<K, V, N, S>
impl<K, V, S, const N: usize> Eq for IndexMap<K, V, S, N>
where
K: Eq + Hash,
V: Eq,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
}
impl<K, V, N, S> Extend<(K, V)> for IndexMap<K, V, N, S>
impl<K, V, S, const N: usize> Extend<(K, V)> for IndexMap<K, V, S, N>
where
K: Eq + Hash,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
fn extend<I>(&mut self, iterable: I)
where
@ -824,12 +801,11 @@ where
}
}
impl<'a, K, V, N, S> Extend<(&'a K, &'a V)> for IndexMap<K, V, N, S>
impl<'a, K, V, S, const N: usize> Extend<(&'a K, &'a V)> for IndexMap<K, V, S, N>
where
K: Eq + Hash + Copy,
V: Copy,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
fn extend<I>(&mut self, iterable: I)
where
@ -839,11 +815,10 @@ where
}
}
impl<K, V, N, S> FromIterator<(K, V)> for IndexMap<K, V, N, S>
impl<K, V, S, const N: usize> FromIterator<(K, V)> for IndexMap<K, V, S, N>
where
K: Eq + Hash,
S: BuildHasher + Default,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
fn from_iter<I>(iterable: I) -> Self
where
@ -855,11 +830,10 @@ where
}
}
impl<'a, K, V, N, S> IntoIterator for &'a IndexMap<K, V, N, S>
impl<'a, K, V, S, const N: usize> IntoIterator for &'a IndexMap<K, V, S, N>
where
K: Eq + Hash,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
type Item = (&'a K, &'a V);
type IntoIter = Iter<'a, K, V>;
@ -869,11 +843,10 @@ where
}
}
impl<'a, K, V, N, S> IntoIterator for &'a mut IndexMap<K, V, N, S>
impl<'a, K, V, S, const N: usize> IntoIterator for &'a mut IndexMap<K, V, S, N>
where
K: Eq + Hash,
S: BuildHasher,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
{
type Item = (&'a K, &'a mut V);
type IntoIter = IterMut<'a, K, V>;
@ -929,21 +902,16 @@ where
#[cfg(test)]
mod tests {
use crate::FnvIndexMap;
use core::mem;
use generic_array::typenum::Unsigned;
use crate::{consts::*, FnvIndexMap};
#[test]
fn size() {
type Cap = U4;
let cap = Cap::to_usize();
const CAP: usize = 4;
assert_eq!(
mem::size_of::<FnvIndexMap<i16, u16, Cap>>(),
cap * mem::size_of::<u32>() + // indices
cap * (mem::size_of::<i16>() + // key
mem::size_of::<FnvIndexMap<i16, u16, CAP>>(),
CAP * mem::size_of::<u32>() + // indices
CAP * (mem::size_of::<i16>() + // key
mem::size_of::<u16>() + // value
mem::size_of::<u16>() // hash
) + // buckets
@ -954,10 +922,10 @@ mod tests {
#[test]
fn partial_eq() {
{
let mut a: FnvIndexMap<_, _, U4> = FnvIndexMap::new();
let mut a: FnvIndexMap<_, _, 4> = FnvIndexMap::new();
a.insert("k1", "v1").unwrap();
let mut b: FnvIndexMap<_, _, U4> = FnvIndexMap::new();
let mut b: FnvIndexMap<_, _, 4> = FnvIndexMap::new();
b.insert("k1", "v1").unwrap();
assert!(a == b);
@ -968,11 +936,11 @@ mod tests {
}
{
let mut a: FnvIndexMap<_, _, U4> = FnvIndexMap::new();
let mut a: FnvIndexMap<_, _, 4> = FnvIndexMap::new();
a.insert("k1", "v1").unwrap();
a.insert("k2", "v2").unwrap();
let mut b: FnvIndexMap<_, _, U4> = FnvIndexMap::new();
let mut b: FnvIndexMap<_, _, 4> = FnvIndexMap::new();
b.insert("k2", "v2").unwrap();
b.insert("k1", "v1").unwrap();

View File

@ -1,12 +1,13 @@
use core::{borrow::Borrow, fmt, iter::FromIterator};
use generic_array::{typenum::PowerOfTwo, ArrayLength};
use hash32::{BuildHasher, BuildHasherDefault, FnvHasher, Hash, Hasher};
use crate::indexmap::{self, Bucket, IndexMap, Pos};
/// An `IndexSet` using the default FNV hasher
pub type FnvIndexSet<T, N> = IndexSet<T, N, BuildHasherDefault<FnvHasher>>;
pub type FnvIndexSet<T, const N: usize> = IndexSet<T, BuildHasherDefault<FnvHasher>, N>;
// TODO: We don't enforce the power of 2 currently (part of generic array bounds)
/// Fixed capacity [`IndexSet`](https://docs.rs/indexmap/1/indexmap/set/struct.IndexSet.html)
///
@ -16,10 +17,9 @@ pub type FnvIndexSet<T, N> = IndexSet<T, N, BuildHasherDefault<FnvHasher>>;
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// // A hash set with a capacity of 16 elements allocated on the stack
/// let mut books = FnvIndexSet::<_, U16>::new();
/// let mut books = FnvIndexSet::<_, 16>::new();
///
/// // Add some books.
/// books.insert("A Dance With Dragons").unwrap();
@ -41,19 +41,17 @@ pub type FnvIndexSet<T, N> = IndexSet<T, N, BuildHasherDefault<FnvHasher>>;
/// println!("{}", book);
/// }
/// ```
pub struct IndexSet<T, N, S>
pub struct IndexSet<T, S, const N: usize>
where
T: Eq + Hash,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
map: IndexMap<T, (), N, S>,
map: IndexMap<T, (), S, N>,
}
impl<T, N, S> IndexSet<T, N, BuildHasherDefault<S>>
impl<T, S, const N: usize> IndexSet<T, BuildHasherDefault<S>, N>
where
T: Eq + Hash,
S: Default + Hasher,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>> + PowerOfTwo,
{
/// Creates an empty `IndexSet`
pub fn new() -> Self {
@ -63,11 +61,10 @@ where
}
}
impl<T, N, S> IndexSet<T, N, S>
impl<T, S, const N: usize> IndexSet<T, S, N>
where
T: Eq + Hash,
S: BuildHasher,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
/// Returns the number of elements the set can hold
///
@ -75,9 +72,8 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let set = FnvIndexSet::<i32, U16>::new();
/// let set = FnvIndexSet::<i32, 16>::new();
/// assert_eq!(set.capacity(), 16);
/// ```
pub fn capacity(&self) -> usize {
@ -90,9 +86,8 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut set = FnvIndexSet::<_, U16>::new();
/// let mut set = FnvIndexSet::<_, 16>::new();
/// set.insert("a").unwrap();
/// set.insert("b").unwrap();
///
@ -114,30 +109,28 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect();
/// let mut b: FnvIndexSet<_, U16> = [4, 2, 3, 4].iter().cloned().collect();
/// let mut a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect();
/// let mut b: FnvIndexSet<_, 16> = [4, 2, 3, 4].iter().cloned().collect();
///
/// // Can be seen as `a - b`.
/// for x in a.difference(&b) {
/// println!("{}", x); // Print 1
/// }
///
/// let diff: FnvIndexSet<_, U16> = a.difference(&b).collect();
/// assert_eq!(diff, [1].iter().collect::<FnvIndexSet<_, U16>>());
/// let diff: FnvIndexSet<_, 16> = a.difference(&b).collect();
/// assert_eq!(diff, [1].iter().collect::<FnvIndexSet<_, 16>>());
///
/// // Note that difference is not symmetric,
/// // and `b - a` means something else:
/// let diff: FnvIndexSet<_, U16> = b.difference(&a).collect();
/// assert_eq!(diff, [4].iter().collect::<FnvIndexSet<_, U16>>());
/// let diff: FnvIndexSet<_, 16> = b.difference(&a).collect();
/// assert_eq!(diff, [4].iter().collect::<FnvIndexSet<_, 16>>());
/// ```
pub fn difference<'a, N2, S2>(
pub fn difference<'a, S2, const N2: usize>(
&'a self,
other: &'a IndexSet<T, N2, S2>,
) -> Difference<'a, T, N2, S2>
other: &'a IndexSet<T, S2, N2>,
) -> Difference<'a, T, S2, N2>
where
N2: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
S2: BuildHasher,
{
Difference {
@ -153,28 +146,26 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect();
/// let mut b: FnvIndexSet<_, U16> = [4, 2, 3, 4].iter().cloned().collect();
/// let mut a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect();
/// let mut b: FnvIndexSet<_, 16> = [4, 2, 3, 4].iter().cloned().collect();
///
/// // Print 1, 4 in that order order.
/// for x in a.symmetric_difference(&b) {
/// println!("{}", x);
/// }
///
/// let diff1: FnvIndexSet<_, U16> = a.symmetric_difference(&b).collect();
/// let diff2: FnvIndexSet<_, U16> = b.symmetric_difference(&a).collect();
/// let diff1: FnvIndexSet<_, 16> = a.symmetric_difference(&b).collect();
/// let diff2: FnvIndexSet<_, 16> = b.symmetric_difference(&a).collect();
///
/// assert_eq!(diff1, diff2);
/// assert_eq!(diff1, [1, 4].iter().collect::<FnvIndexSet<_, U16>>());
/// assert_eq!(diff1, [1, 4].iter().collect::<FnvIndexSet<_, 16>>());
/// ```
pub fn symmetric_difference<'a, N2, S2>(
pub fn symmetric_difference<'a, S2, const N2: usize>(
&'a self,
other: &'a IndexSet<T, N2, S2>,
other: &'a IndexSet<T, S2, N2>,
) -> impl Iterator<Item = &'a T>
where
N2: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
S2: BuildHasher,
{
self.difference(other).chain(other.difference(self))
@ -187,25 +178,23 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect();
/// let mut b: FnvIndexSet<_, U16> = [4, 2, 3, 4].iter().cloned().collect();
/// let mut a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect();
/// let mut b: FnvIndexSet<_, 16> = [4, 2, 3, 4].iter().cloned().collect();
///
/// // Print 2, 3 in that order.
/// for x in a.intersection(&b) {
/// println!("{}", x);
/// }
///
/// let intersection: FnvIndexSet<_, U16> = a.intersection(&b).collect();
/// assert_eq!(intersection, [2, 3].iter().collect::<FnvIndexSet<_, U16>>());
/// let intersection: FnvIndexSet<_, 16> = a.intersection(&b).collect();
/// assert_eq!(intersection, [2, 3].iter().collect::<FnvIndexSet<_, 16>>());
/// ```
pub fn intersection<'a, N2, S2>(
pub fn intersection<'a, S2, const N2: usize>(
&'a self,
other: &'a IndexSet<T, N2, S2>,
) -> Intersection<'a, T, N2, S2>
other: &'a IndexSet<T, S2, N2>,
) -> Intersection<'a, T, S2, N2>
where
N2: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
S2: BuildHasher,
{
Intersection {
@ -221,25 +210,23 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect();
/// let mut b: FnvIndexSet<_, U16> = [4, 2, 3, 4].iter().cloned().collect();
/// let mut a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect();
/// let mut b: FnvIndexSet<_, 16> = [4, 2, 3, 4].iter().cloned().collect();
///
/// // Print 1, 2, 3, 4 in that order.
/// for x in a.union(&b) {
/// println!("{}", x);
/// }
///
/// let union: FnvIndexSet<_, U16> = a.union(&b).collect();
/// assert_eq!(union, [1, 2, 3, 4].iter().collect::<FnvIndexSet<_, U16>>());
/// let union: FnvIndexSet<_, 16> = a.union(&b).collect();
/// assert_eq!(union, [1, 2, 3, 4].iter().collect::<FnvIndexSet<_, 16>>());
/// ```
pub fn union<'a, N2, S2>(
pub fn union<'a, S2, const N2: usize>(
&'a self,
other: &'a IndexSet<T, N2, S2>,
other: &'a IndexSet<T, S2, N2>,
) -> impl Iterator<Item = &'a T>
where
N2: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
S2: BuildHasher,
{
self.iter().chain(other.difference(self))
@ -251,9 +238,8 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut v: FnvIndexSet<_, U16> = FnvIndexSet::new();
/// let mut v: FnvIndexSet<_, 16> = FnvIndexSet::new();
/// assert_eq!(v.len(), 0);
/// v.insert(1).unwrap();
/// assert_eq!(v.len(), 1);
@ -268,9 +254,8 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut v: FnvIndexSet<_, U16> = FnvIndexSet::new();
/// let mut v: FnvIndexSet<_, 16> = FnvIndexSet::new();
/// assert!(v.is_empty());
/// v.insert(1).unwrap();
/// assert!(!v.is_empty());
@ -285,9 +270,8 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut v: FnvIndexSet<_, U16> = FnvIndexSet::new();
/// let mut v: FnvIndexSet<_, 16> = FnvIndexSet::new();
/// v.insert(1).unwrap();
/// v.clear();
/// assert!(v.is_empty());
@ -305,9 +289,8 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let set: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect();
/// let set: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect();
/// assert_eq!(set.contains(&1), true);
/// assert_eq!(set.contains(&4), false);
/// ```
@ -326,10 +309,9 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let a: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect();
/// let mut b = FnvIndexSet::<_, U16>::new();
/// let a: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect();
/// let mut b = FnvIndexSet::<_, 16>::new();
///
/// assert_eq!(a.is_disjoint(&b), true);
/// b.insert(4).unwrap();
@ -337,9 +319,8 @@ where
/// b.insert(1).unwrap();
/// assert_eq!(a.is_disjoint(&b), false);
/// ```
pub fn is_disjoint<N2, S2>(&self, other: &IndexSet<T, N2, S2>) -> bool
pub fn is_disjoint<S2, const N2: usize>(&self, other: &IndexSet<T, S2, N2>) -> bool
where
N2: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
S2: BuildHasher,
{
self.iter().all(|v| !other.contains(v))
@ -352,10 +333,9 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let sup: FnvIndexSet<_, U16> = [1, 2, 3].iter().cloned().collect();
/// let mut set = FnvIndexSet::<_, U16>::new();
/// let sup: FnvIndexSet<_, 16> = [1, 2, 3].iter().cloned().collect();
/// let mut set = FnvIndexSet::<_, 16>::new();
///
/// assert_eq!(set.is_subset(&sup), true);
/// set.insert(2).unwrap();
@ -363,9 +343,8 @@ where
/// set.insert(4).unwrap();
/// assert_eq!(set.is_subset(&sup), false);
/// ```
pub fn is_subset<N2, S2>(&self, other: &IndexSet<T, N2, S2>) -> bool
pub fn is_subset<S2, const N2: usize>(&self, other: &IndexSet<T, S2, N2>) -> bool
where
N2: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
S2: BuildHasher,
{
self.iter().all(|v| other.contains(v))
@ -378,10 +357,9 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let sub: FnvIndexSet<_, U16> = [1, 2].iter().cloned().collect();
/// let mut set = FnvIndexSet::<_, U16>::new();
/// let sub: FnvIndexSet<_, 16> = [1, 2].iter().cloned().collect();
/// let mut set = FnvIndexSet::<_, 16>::new();
///
/// assert_eq!(set.is_superset(&sub), false);
///
@ -392,9 +370,8 @@ where
/// set.insert(2).unwrap();
/// assert_eq!(set.is_superset(&sub), true);
/// ```
pub fn is_superset<N2, S2>(&self, other: &IndexSet<T, N2, S2>) -> bool
pub fn is_superset<S2, const N2: usize>(&self, other: &IndexSet<T, S2, N2>) -> bool
where
N2: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
S2: BuildHasher,
{
other.is_subset(self)
@ -410,9 +387,8 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut set = FnvIndexSet::<_, U16>::new();
/// let mut set = FnvIndexSet::<_, 16>::new();
///
/// assert_eq!(set.insert(2).unwrap(), true);
/// assert_eq!(set.insert(2).unwrap(), false);
@ -434,9 +410,8 @@ where
///
/// ```
/// use heapless::FnvIndexSet;
/// use heapless::consts::*;
///
/// let mut set = FnvIndexSet::<_, U16>::new();
/// let mut set = FnvIndexSet::<_, 16>::new();
///
/// set.insert(2).unwrap();
/// assert_eq!(set.remove(&2), true);
@ -451,11 +426,10 @@ where
}
}
impl<T, N, S> Clone for IndexSet<T, N, S>
impl<T, S, const N: usize> Clone for IndexSet<T, S, N>
where
T: Eq + Hash + Clone,
S: Clone,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
fn clone(&self) -> Self {
Self {
@ -464,22 +438,20 @@ where
}
}
impl<T, N, S> fmt::Debug for IndexSet<T, N, S>
impl<T, S, const N: usize> fmt::Debug for IndexSet<T, S, N>
where
T: Eq + Hash + fmt::Debug,
S: BuildHasher,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_set().entries(self.iter()).finish()
}
}
impl<T, N, S> Default for IndexSet<T, N, S>
impl<T, S, const N: usize> Default for IndexSet<T, S, N>
where
T: Eq + Hash,
S: BuildHasher + Default,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
fn default() -> Self {
IndexSet {
@ -488,24 +460,22 @@ where
}
}
impl<T, N1, N2, S1, S2> PartialEq<IndexSet<T, N2, S2>> for IndexSet<T, N1, S1>
impl<T, S1, S2, const N1: usize, const N2: usize> PartialEq<IndexSet<T, S2, N2>>
for IndexSet<T, S1, N1>
where
T: Eq + Hash,
S1: BuildHasher,
S2: BuildHasher,
N1: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
N2: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
fn eq(&self, other: &IndexSet<T, N2, S2>) -> bool {
fn eq(&self, other: &IndexSet<T, S2, N2>) -> bool {
self.len() == other.len() && self.is_subset(other)
}
}
impl<T, N, S> Extend<T> for IndexSet<T, N, S>
impl<T, S, const N: usize> Extend<T> for IndexSet<T, S, N>
where
T: Eq + Hash,
S: BuildHasher,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
fn extend<I>(&mut self, iterable: I)
where
@ -515,11 +485,10 @@ where
}
}
impl<'a, T, N, S> Extend<&'a T> for IndexSet<T, N, S>
impl<'a, T, S, const N: usize> Extend<&'a T> for IndexSet<T, S, N>
where
T: 'a + Eq + Hash + Copy,
S: BuildHasher,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
fn extend<I>(&mut self, iterable: I)
where
@ -529,11 +498,10 @@ where
}
}
impl<T, N, S> FromIterator<T> for IndexSet<T, N, S>
impl<T, S, const N: usize> FromIterator<T> for IndexSet<T, S, N>
where
T: Eq + Hash,
S: BuildHasher + Default,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
fn from_iter<I>(iter: I) -> Self
where
@ -545,11 +513,10 @@ where
}
}
impl<'a, T, N, S> IntoIterator for &'a IndexSet<T, N, S>
impl<'a, T, S, const N: usize> IntoIterator for &'a IndexSet<T, S, N>
where
T: Eq + Hash,
S: BuildHasher,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
type Item = &'a T;
type IntoIter = Iter<'a, T>;
@ -579,21 +546,19 @@ impl<'a, T> Clone for Iter<'a, T> {
}
}
pub struct Difference<'a, T, N, S>
pub struct Difference<'a, T, S, const N: usize>
where
S: BuildHasher,
T: Eq + Hash,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
iter: Iter<'a, T>,
other: &'a IndexSet<T, N, S>,
other: &'a IndexSet<T, S, N>,
}
impl<'a, T, N, S> Iterator for Difference<'a, T, N, S>
impl<'a, T, S, const N: usize> Iterator for Difference<'a, T, S, N>
where
S: BuildHasher,
T: Eq + Hash,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
type Item = &'a T;
@ -607,21 +572,19 @@ where
}
}
pub struct Intersection<'a, T, N, S>
pub struct Intersection<'a, T, S, const N: usize>
where
S: BuildHasher,
T: Eq + Hash,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
iter: Iter<'a, T>,
other: &'a IndexSet<T, N, S>,
other: &'a IndexSet<T, S, N>,
}
impl<'a, T, N, S> Iterator for Intersection<'a, T, N, S>
impl<'a, T, S, const N: usize> Iterator for Intersection<'a, T, S, N>
where
S: BuildHasher,
T: Eq + Hash,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>>,
{
type Item = &'a T;

View File

@ -10,17 +10,14 @@
//!
//! ```
//! use heapless::Vec; // fixed capacity `std::Vec`
//! use heapless::consts::U8; // type level integer used to specify capacity
//!
//! // on the stack
//! let mut xs: Vec<u8, U8> = Vec::new(); // can hold up to 8 elements
//! let mut xs: Vec<u8, 8> = Vec::new(); // can hold up to 8 elements
//! xs.push(42).unwrap();
//! assert_eq!(xs.pop(), Some(42));
//!
//! // in a `static` variable
//! // (because `const-fn` has not been fully stabilized you need to use the helper structs in
//! // the `i` module, which must be wrapped in a tuple struct)
//! static mut XS: Vec<u8, U8> = Vec(heapless::i::Vec::new());
//! static mut XS: Vec<u8, 8> = Vec::new();
//!
//! let xs = unsafe { &mut XS };
//!
@ -28,7 +25,7 @@
//! assert_eq!(xs.pop(), Some(42));
//!
//! // in the heap (though kind of pointless because no reallocation)
//! let mut ys: Box<Vec<u8, U8>> = Box::new(Vec::new());
//! let mut ys: Box<Vec<u8, 8>> = Box::new(Vec::new());
//! ys.push(42).unwrap();
//! assert_eq!(ys.pop(), Some(42));
//! ```
@ -69,29 +66,32 @@
//! This crate is guaranteed to compile on stable Rust 1.36 and up with its default set of features.
//! It *might* compile on older versions but that may change in any new patch release.
// experimental usage of const generics, requires nightly 2020-08-18 (or newer)
#![feature(min_const_generics)]
#![feature(const_fn)]
#![cfg_attr(not(test), no_std)]
#![deny(missing_docs)]
#![deny(rust_2018_compatibility)]
#![deny(rust_2018_idioms)]
#![deny(warnings)]
// #![deny(warnings)]
pub use binary_heap::BinaryHeap;
pub use generic_array::typenum::{consts, PowerOfTwo};
pub use generic_array::ArrayLength;
// pub use generic_array::typenum::{consts, PowerOfTwo};
// pub use generic_array::ArrayLength;
pub use histbuf::HistoryBuffer;
pub use indexmap::{Bucket, FnvIndexMap, IndexMap, Pos};
pub use indexset::{FnvIndexSet, IndexSet};
pub use linear_map::LinearMap;
pub use string::String;
pub use vec::Vec;
pub use histbuf::HistoryBuffer;
// NOTE this code was last ported from v0.4.1 of the indexmap crate
mod histbuf;
mod indexmap;
mod indexset;
mod linear_map;
mod string;
mod vec;
mod histbuf;
#[cfg(feature = "serde")]
mod de;
@ -99,10 +99,10 @@ mod de;
mod ser;
pub mod binary_heap;
pub mod i;
// pub mod i;
#[cfg(all(has_cas, feature = "cas"))]
pub mod mpmc;
#[cfg(all(has_cas, feature = "cas"))]
// #[cfg(all(has_cas, feature = "cas"))]
pub mod pool;
#[cfg(has_atomics)]
pub mod spsc;

View File

@ -6,31 +6,21 @@ use core::{
ops, ptr, slice,
};
use generic_array::{ArrayLength, GenericArray};
use crate::Vec;
/// A fixed capacity map / dictionary that performs lookups via linear search
///
/// Note that as this map doesn't use hashing so most operations are **O(N)** instead of O(1)
pub struct LinearMap<K, V, N>(#[doc(hidden)] pub crate::i::LinearMap<GenericArray<(K, V), N>>)
where
N: ArrayLength<(K, V)>,
K: Eq;
impl<A> crate::i::LinearMap<A> {
/// `LinearMap` `const` constructor; wrap the returned value in
/// [`LinearMap`](../struct.LinearMap.html)
pub const fn new() -> Self {
Self {
buffer: crate::i::Vec::new(),
}
}
pub struct LinearMap<K, V, const N: usize>
where
K: Eq,
{
pub(crate) buffer: Vec<(K, V), N>,
}
impl<K, V, N> LinearMap<K, V, N>
impl<K, V, const N: usize> LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq,
{
/// Creates an empty `LinearMap`
@ -39,16 +29,15 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// // allocate the map on the stack
/// let mut map: LinearMap<&str, isize, U8> = LinearMap::new();
/// let mut map: LinearMap<&str, isize, 8> = LinearMap::new();
///
/// // allocate the map in a static variable
/// static mut MAP: LinearMap<&str, isize, U8> = LinearMap(heapless::i::LinearMap::new());
/// static mut MAP: LinearMap<&str, isize, 8> = LinearMap::new();
/// ```
pub fn new() -> Self {
LinearMap(crate::i::LinearMap::new())
pub const fn new() -> Self {
Self { buffer: Vec::new() }
}
/// Returns the number of elements that the map can hold
@ -59,13 +48,12 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let map: LinearMap<&str, isize, U8> = LinearMap::new();
/// let map: LinearMap<&str, isize, 8> = LinearMap::new();
/// assert_eq!(map.capacity(), 8);
/// ```
pub fn capacity(&self) -> usize {
N::to_usize()
N
}
/// Clears the map, removing all key-value pairs
@ -76,15 +64,14 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert(1, "a").unwrap();
/// map.clear();
/// assert!(map.is_empty());
/// ```
pub fn clear(&mut self) {
self.0.buffer.clear()
self.buffer.clear()
}
/// Returns true if the map contains a value for the specified key.
@ -95,9 +82,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert(1, "a").unwrap();
/// assert_eq!(map.contains_key(&1), true);
/// assert_eq!(map.contains_key(&2), false);
@ -114,9 +100,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert(1, "a").unwrap();
/// assert_eq!(map.get(&1), Some(&"a"));
/// assert_eq!(map.get(&2), None);
@ -139,9 +124,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert(1, "a").unwrap();
/// if let Some(x) = map.get_mut(&1) {
/// *x = "b";
@ -166,15 +150,14 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut a: LinearMap<_, _, U8> = LinearMap::new();
/// let mut a: LinearMap<_, _, 8> = LinearMap::new();
/// assert_eq!(a.len(), 0);
/// a.insert(1, "a").unwrap();
/// assert_eq!(a.len(), 1);
/// ```
pub fn len(&self) -> usize {
self.0.buffer.len
self.buffer.len()
}
/// Inserts a key-value pair into the map.
@ -189,9 +172,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// assert_eq!(map.insert(37, "a").unwrap(), None);
/// assert_eq!(map.is_empty(), false);
///
@ -205,7 +187,7 @@ where
return Ok(Some(value));
}
self.0.buffer.push((key, value))?;
self.buffer.push((key, value))?;
Ok(None)
}
@ -217,9 +199,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut a: LinearMap<_, _, U8> = LinearMap::new();
/// let mut a: LinearMap<_, _, 8> = LinearMap::new();
/// assert!(a.is_empty());
/// a.insert(1, "a").unwrap();
/// assert!(!a.is_empty());
@ -234,9 +215,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -247,7 +227,7 @@ where
/// ```
pub fn iter(&self) -> Iter<'_, K, V> {
Iter {
iter: self.0.buffer.as_slice().iter(),
iter: self.buffer.as_slice().iter(),
}
}
@ -258,9 +238,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -276,7 +255,7 @@ where
/// ```
pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
IterMut {
iter: self.0.buffer.as_mut_slice().iter_mut(),
iter: self.buffer.as_mut_slice().iter_mut(),
}
}
@ -286,9 +265,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -310,9 +288,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert(1, "a").unwrap();
/// assert_eq!(map.remove(&1), Some("a"));
/// assert_eq!(map.remove(&1), None);
@ -328,7 +305,7 @@ where
.find(|&(_, k)| k.borrow() == key)
.map(|(idx, _)| idx);
idx.map(|idx| self.0.buffer.swap_remove(idx).1)
idx.map(|idx| self.buffer.swap_remove(idx).1)
}
/// An iterator visiting all values in arbitrary order
@ -337,9 +314,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -358,9 +334,8 @@ where
///
/// ```
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
/// let mut map: LinearMap<_, _, U8> = LinearMap::new();
/// let mut map: LinearMap<_, _, 8> = LinearMap::new();
/// map.insert("a", 1).unwrap();
/// map.insert("b", 2).unwrap();
/// map.insert("c", 3).unwrap();
@ -378,9 +353,8 @@ where
}
}
impl<'a, K, V, N, Q> ops::Index<&'a Q> for LinearMap<K, V, N>
impl<'a, K, V, Q, const N: usize> ops::Index<&'a Q> for LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Borrow<Q> + Eq,
Q: Eq + ?Sized,
{
@ -391,9 +365,8 @@ where
}
}
impl<'a, K, V, N, Q> ops::IndexMut<&'a Q> for LinearMap<K, V, N>
impl<'a, K, V, Q, const N: usize> ops::IndexMut<&'a Q> for LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Borrow<Q> + Eq,
Q: Eq + ?Sized,
{
@ -402,9 +375,8 @@ where
}
}
impl<K, V, N> Default for LinearMap<K, V, N>
impl<K, V, const N: usize> Default for LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq,
{
fn default() -> Self {
@ -412,22 +384,20 @@ where
}
}
impl<K, V, N> Clone for LinearMap<K, V, N>
impl<K, V, const N: usize> Clone for LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq + Clone,
V: Clone,
{
fn clone(&self) -> Self {
Self(crate::i::LinearMap {
buffer: self.0.buffer.clone(),
})
Self {
buffer: self.buffer.clone(),
}
}
}
impl<K, V, N> fmt::Debug for LinearMap<K, V, N>
impl<K, V, const N: usize> fmt::Debug for LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq + fmt::Debug,
V: fmt::Debug,
{
@ -436,9 +406,8 @@ where
}
}
impl<K, V, N> FromIterator<(K, V)> for LinearMap<K, V, N>
impl<K, V, const N: usize> FromIterator<(K, V)> for LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq,
{
fn from_iter<I>(iter: I) -> Self
@ -446,22 +415,20 @@ where
I: IntoIterator<Item = (K, V)>,
{
let mut out = Self::new();
out.0.buffer.extend(iter);
out.buffer.extend(iter);
out
}
}
pub struct IntoIter<K, V, N>
pub struct IntoIter<K, V, const N: usize>
where
N: ArrayLength<(K, V)>,
K: Eq,
{
inner: <Vec<(K, V), N> as IntoIterator>::IntoIter,
}
impl<K, V, N> Iterator for IntoIter<K, V, N>
impl<K, V, const N: usize> Iterator for IntoIter<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq,
{
type Item = (K, V);
@ -470,28 +437,27 @@ where
}
}
impl<K, V, N> IntoIterator for LinearMap<K, V, N>
// TODO: Why is this needed at all, no example, no test... I don't get it
// impl<K, V, const N: usize> IntoIterator for LinearMap<K, V, N>
// where
// K: Eq,
// {
// type Item = (K, V);
// type IntoIter = IntoIter<K, V, N>;
// fn into_iter(mut self) -> Self::IntoIter {
// // FIXME this may result in a memcpy at runtime
// let lm = mem::replace(&mut self, unsafe { MaybeUninit::uninit().assume_init() });
// mem::forget(self);
// Self::IntoIter {
// inner: lm.buffer.into_iter(),
// }
// }
// }
impl<'a, K, V, const N: usize> IntoIterator for &'a LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq,
{
type Item = (K, V);
type IntoIter = IntoIter<K, V, N>;
fn into_iter(mut self) -> Self::IntoIter {
// FIXME this may result in a memcpy at runtime
let lm = mem::replace(&mut self.0, unsafe { MaybeUninit::uninit().assume_init() });
mem::forget(self);
Self::IntoIter {
inner: crate::Vec(lm.buffer).into_iter(),
}
}
}
impl<'a, K, V, N> IntoIterator for &'a LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq,
{
type Item = (&'a K, &'a V);
@ -522,13 +488,15 @@ impl<'a, K, V> Clone for Iter<'a, K, V> {
}
}
impl<K, V, N> Drop for LinearMap<K, V, N>
impl<K, V, const N: usize> Drop for LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq,
{
fn drop(&mut self) {
unsafe { ptr::drop_in_place(self.0.buffer.as_mut_slice()) }
// heapless::Vec implements drop right?
drop(&self.buffer);
// original code below
// unsafe { ptr::drop_in_place(self.buffer.as_mut_slice()) }
}
}
@ -544,12 +512,10 @@ impl<'a, K, V> Iterator for IterMut<'a, K, V> {
}
}
impl<K, V, N, N2> PartialEq<LinearMap<K, V, N2>> for LinearMap<K, V, N>
impl<K, V, const N: usize, const N2: usize> PartialEq<LinearMap<K, V, N2>> for LinearMap<K, V, N>
where
K: Eq,
V: PartialEq,
N: ArrayLength<(K, V)>,
N2: ArrayLength<(K, V)>,
{
fn eq(&self, other: &LinearMap<K, V, N2>) -> bool {
self.len() == other.len()
@ -559,30 +525,29 @@ where
}
}
impl<K, V, N> Eq for LinearMap<K, V, N>
impl<K, V, const N: usize> Eq for LinearMap<K, V, N>
where
K: Eq,
V: PartialEq,
N: ArrayLength<(K, V)>,
{
}
#[cfg(test)]
mod test {
use crate::{consts::*, LinearMap};
use crate::LinearMap;
#[test]
fn static_new() {
static mut _L: LinearMap<i32, i32, U8> = LinearMap(crate::i::LinearMap::new());
static mut _L: LinearMap<i32, i32, 8> = LinearMap::new();
}
#[test]
fn partial_eq() {
{
let mut a = LinearMap::<_, _, U1>::new();
let mut a = LinearMap::<_, _, 1>::new();
a.insert("k1", "v1").unwrap();
let mut b = LinearMap::<_, _, U2>::new();
let mut b = LinearMap::<_, _, 2>::new();
b.insert("k1", "v1").unwrap();
assert!(a == b);
@ -593,15 +558,17 @@ mod test {
}
{
let mut a = LinearMap::<_, _, U2>::new();
let mut a = LinearMap::<_, _, 2>::new();
a.insert("k1", "v1").unwrap();
a.insert("k2", "v2").unwrap();
let mut b = LinearMap::<_, _, U2>::new();
let mut b = LinearMap::<_, _, 2>::new();
b.insert("k2", "v2").unwrap();
b.insert("k1", "v1").unwrap();
assert!(a == b);
}
}
// TODO: drop test
}

View File

@ -2,7 +2,6 @@
pub mod spsc {
#[cfg(has_atomics)]
use crate::spsc::{MultiCore, SingleCore};
#[cfg(has_atomics)]
use core::sync::atomic::{self, AtomicU16, AtomicU8, AtomicUsize, Ordering};

View File

@ -1,4 +1,6 @@
use generic_array::{typenum::PowerOfTwo, ArrayLength};
//! missing doc
// use generic_array::{typenum::PowerOfTwo, ArrayLength};
use hash32::{BuildHasher, Hash};
use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer};
@ -10,10 +12,9 @@ use crate::{
// Sequential containers
impl<T, N, KIND> Serialize for BinaryHeap<T, N, KIND>
impl<T, KIND, const N: usize> Serialize for BinaryHeap<T, KIND, N>
where
T: Ord + Serialize,
N: ArrayLength<T>,
KIND: BinaryHeapKind,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
@ -28,11 +29,10 @@ where
}
}
impl<T, N, S> Serialize for IndexSet<T, N, S>
impl<T, S, const N: usize> Serialize for IndexSet<T, S, N>
where
T: Eq + Hash + Serialize,
S: BuildHasher,
N: ArrayLength<Bucket<T, ()>> + ArrayLength<Option<Pos>> + PowerOfTwo,
{
fn serialize<SER>(&self, serializer: SER) -> Result<SER::Ok, SER::Error>
where
@ -46,10 +46,9 @@ where
}
}
impl<T, N> Serialize for Vec<T, N>
impl<T, const N: usize> Serialize for Vec<T, N>
where
T: Serialize,
N: ArrayLength<T>,
{
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
@ -65,10 +64,9 @@ where
// Dictionaries
impl<K, V, N, S> Serialize for IndexMap<K, V, N, S>
impl<K, V, S, const N: usize> Serialize for IndexMap<K, V, S, N>
where
K: Eq + Hash + Serialize,
N: ArrayLength<Bucket<K, V>> + ArrayLength<Option<Pos>>,
S: BuildHasher,
V: Serialize,
{
@ -84,9 +82,8 @@ where
}
}
impl<K, V, N> Serialize for LinearMap<K, V, N>
impl<K, V, const N: usize> Serialize for LinearMap<K, V, N>
where
N: ArrayLength<(K, V)>,
K: Eq + Serialize,
V: Serialize,
{
@ -104,10 +101,7 @@ where
// String containers
impl<N> Serialize for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> Serialize for String<N> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,

View File

@ -9,9 +9,8 @@
//!
//! ```
//! use heapless::spsc::Queue;
//! use heapless::consts::*;
//!
//! let mut rb: Queue<u8, U4> = Queue::new();
//! let mut rb: Queue<u8, _, _, 4> = Queue::new();
//!
//! assert!(rb.enqueue(0).is_ok());
//! assert!(rb.enqueue(1).is_ok());
@ -25,10 +24,11 @@
//! - `Queue` can be `split` and then be used in Single Producer Single Consumer mode
//!
//! ```
//! use heapless::spsc::Queue;
//! use heapless::consts::*;
//! use heapless::spsc::{Queue, MultiCore};
//!
//! static mut Q: Queue<Event, U4> = Queue(heapless::i::Queue::new());
//! // Notice, type signature needs to be explicit for now.
//! // (min_const_eval, does not allow for default type assignments)
//! static mut Q: Queue<Event, usize, MultiCore, 4> = Queue::new();
//!
//! enum Event { A, B }
//!
@ -85,7 +85,6 @@
use core::{cell::UnsafeCell, fmt, hash, marker::PhantomData, mem::MaybeUninit, ptr};
use generic_array::{ArrayLength, GenericArray};
use hash32;
use crate::sealed::spsc as sealed;
@ -161,23 +160,30 @@ where
/// following constructors: `u8_sc`, `u16_sc`, `usize_sc` and `new_sc`. This variant is `unsafe` to
/// create because the programmer must make sure that the queue's consumer and producer endpoints
/// (if split) are kept on a single core for their entire lifetime.
pub struct Queue<T, N, U = usize, C = MultiCore>(
#[doc(hidden)] pub crate::i::Queue<GenericArray<T, N>, U, C>,
)
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore;
impl<T, N, U, C> Queue<T, N, U, C>
#[cfg(has_atomics)]
pub struct Queue<T, U, C, const N: usize>
where
U: sealed::Uxx,
C: sealed::XCore,
{
// this is from where we dequeue items
pub(crate) head: Atomic<U, C>,
// this is where we enqueue new items
pub(crate) tail: Atomic<U, C>,
pub(crate) buffer: MaybeUninit<[T; N]>,
}
impl<T, U, C, const N: usize> Queue<T, U, C, N>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
/// Returns the maximum number of elements the queue can hold
pub fn capacity(&self) -> U {
U::saturate(N::to_usize())
U::saturate(N)
}
/// Returns `true` if the queue has a length of 0
@ -186,7 +192,7 @@ where
}
/// Iterates from the front of the queue to the back
pub fn iter(&self) -> Iter<'_, T, N, U, C> {
pub fn iter(&self) -> Iter<'_, T, U, C, N> {
Iter {
rb: self,
index: 0,
@ -195,7 +201,7 @@ where
}
/// Returns an iterator that allows modifying each value.
pub fn iter_mut(&mut self) -> IterMut<'_, T, N, U, C> {
pub fn iter_mut(&mut self) -> IterMut<'_, T, U, C, N> {
let len = self.len_usize();
IterMut {
rb: self,
@ -205,16 +211,15 @@ where
}
fn len_usize(&self) -> usize {
let head = self.0.head.load_relaxed().into();
let tail = self.0.tail.load_relaxed().into();
let head = self.head.load_relaxed().into();
let tail = self.tail.load_relaxed().into();
U::truncate(tail.wrapping_sub(head)).into()
}
}
impl<T, N, U, C> Drop for Queue<T, N, U, C>
impl<T, U, C, const N: usize> Drop for Queue<T, U, C, N>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
@ -227,9 +232,8 @@ where
}
}
impl<T, N, U, C> fmt::Debug for Queue<T, N, U, C>
impl<T, U, C, const N: usize> fmt::Debug for Queue<T, U, C, N>
where
N: ArrayLength<T>,
T: fmt::Debug,
U: sealed::Uxx,
C: sealed::XCore,
@ -239,9 +243,8 @@ where
}
}
impl<T, N, U, C> hash::Hash for Queue<T, N, U, C>
impl<T, U, C, const N: usize> hash::Hash for Queue<T, U, C, N>
where
N: ArrayLength<T>,
T: hash::Hash,
U: sealed::Uxx,
C: sealed::XCore,
@ -254,9 +257,8 @@ where
}
}
impl<T, N, U, C> hash32::Hash for Queue<T, N, U, C>
impl<T, U, C, const N: usize> hash32::Hash for Queue<T, U, C, N>
where
N: ArrayLength<T>,
T: hash32::Hash,
U: sealed::Uxx,
C: sealed::XCore,
@ -269,28 +271,26 @@ where
}
}
impl<'a, T, N, U, C> IntoIterator for &'a Queue<T, N, U, C>
impl<'a, T, U, C, const N: usize> IntoIterator for &'a Queue<T, U, C, N>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
type Item = &'a T;
type IntoIter = Iter<'a, T, N, U, C>;
type IntoIter = Iter<'a, T, U, C, N>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a, T, N, U, C> IntoIterator for &'a mut Queue<T, N, U, C>
impl<'a, T, U, C, const N: usize> IntoIterator for &'a mut Queue<T, U, C, N>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
type Item = &'a mut T;
type IntoIter = IterMut<'a, T, N, U, C>;
type IntoIter = IterMut<'a, T, U, C, N>;
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
@ -299,43 +299,21 @@ where
macro_rules! impl_ {
($uxx:ident, $uxx_sc:ident) => {
impl<T, N> Queue<T, N, $uxx, MultiCore>
where
N: ArrayLength<T>,
{
impl<T, const N: usize> Queue<T, $uxx, MultiCore, N> {
/// Creates an empty queue with a fixed capacity of `N`
pub fn $uxx() -> Self {
Queue(crate::i::Queue::$uxx())
}
}
impl<A> crate::i::Queue<A, $uxx, MultiCore> {
/// `spsc::Queue` `const` constructor; wrap the returned value in
/// [`spsc::Queue`](struct.Queue.html)
pub const fn $uxx() -> Self {
crate::i::Queue {
buffer: MaybeUninit::uninit(),
Self {
head: Atomic::new(0),
tail: Atomic::new(0),
buffer: MaybeUninit::uninit(),
}
}
}
impl<T, N> Queue<T, N, $uxx, SingleCore>
where
N: ArrayLength<T>,
{
impl<T, const N: usize> Queue<T, $uxx, SingleCore, N> {
/// Creates an empty queue with a fixed capacity of `N` (single core variant)
pub unsafe fn $uxx_sc() -> Self {
Queue(crate::i::Queue::$uxx_sc())
}
}
impl<A> crate::i::Queue<A, $uxx, SingleCore> {
/// `spsc::Queue` `const` constructor; wrap the returned value in
/// [`spsc::Queue`](struct.Queue.html)
pub const unsafe fn $uxx_sc() -> Self {
crate::i::Queue {
Self {
buffer: MaybeUninit::uninit(),
head: Atomic::new(0),
tail: Atomic::new(0),
@ -343,9 +321,8 @@ macro_rules! impl_ {
}
}
impl<T, N, C> Queue<T, N, $uxx, C>
impl<T, C, const N: usize> Queue<T, $uxx, C, N>
where
N: ArrayLength<T>,
C: sealed::XCore,
{
/// Returns a reference to the item in the front of the queue without dequeuing, or
@ -354,9 +331,8 @@ macro_rules! impl_ {
/// # Examples
/// ```
/// use heapless::spsc::Queue;
/// use heapless::consts::*;
///
/// let mut queue: Queue<u8, U235, _> = Queue::u8();
/// let mut queue: Queue<u8, _, _, 235> = Queue::u8();
/// let (mut producer, mut consumer) = queue.split();
/// assert_eq!(None, consumer.peek());
/// producer.enqueue(1);
@ -367,10 +343,10 @@ macro_rules! impl_ {
pub fn peek(&self) -> Option<&T> {
let cap = self.capacity();
let head = self.0.head.get();
let tail = self.0.tail.get();
let head = self.head.get();
let tail = self.tail.get();
let p = self.0.buffer.as_ptr();
let p = self.buffer.as_ptr();
if *head != *tail {
let item = unsafe { &*(p as *const T).add(usize::from(*head % cap)) };
@ -384,10 +360,10 @@ macro_rules! impl_ {
pub fn dequeue(&mut self) -> Option<T> {
let cap = self.capacity();
let head = self.0.head.get_mut();
let tail = self.0.tail.get_mut();
let head = self.head.get_mut();
let tail = self.tail.get_mut();
let p = self.0.buffer.as_ptr();
let p = self.buffer.as_ptr();
if *head != *tail {
let item = unsafe { (p as *const T).add(usize::from(*head % cap)).read() };
@ -403,8 +379,8 @@ macro_rules! impl_ {
/// Returns back the `item` if the queue is full
pub fn enqueue(&mut self, item: T) -> Result<(), T> {
let cap = self.capacity();
let head = *self.0.head.get_mut();
let tail = *self.0.tail.get_mut();
let head = *self.head.get_mut();
let tail = *self.tail.get_mut();
if tail.wrapping_sub(head) > cap - 1 {
Err(item)
@ -424,12 +400,12 @@ macro_rules! impl_ {
/// twice.
pub unsafe fn enqueue_unchecked(&mut self, item: T) {
let cap = self.capacity();
let tail = self.0.tail.get_mut();
let tail = self.tail.get_mut();
// NOTE(ptr::write) the memory slot that we are about to write to is
// uninitialized. We use `ptr::write` to avoid running `T`'s destructor on the
// uninitialized memory
(self.0.buffer.as_mut_ptr() as *mut T)
(self.buffer.as_mut_ptr() as *mut T)
.add(usize::from(*tail % cap))
.write(item);
*tail = tail.wrapping_add(1);
@ -437,24 +413,23 @@ macro_rules! impl_ {
/// Returns the number of elements in the queue
pub fn len(&self) -> $uxx {
let head = self.0.head.load_relaxed();
let tail = self.0.tail.load_relaxed();
let head = self.head.load_relaxed();
let tail = self.tail.load_relaxed();
tail.wrapping_sub(head)
}
}
impl<T, N, C> Clone for Queue<T, N, $uxx, C>
impl<T, C, const N: usize> Clone for Queue<T, $uxx, C, N>
where
T: Clone,
N: ArrayLength<T>,
C: sealed::XCore,
{
fn clone(&self) -> Self {
let mut new: Queue<T, N, $uxx, C> = Queue(crate::i::Queue {
let mut new: Queue<T, $uxx, C, N> = Queue {
buffer: MaybeUninit::uninit(),
head: Atomic::new(0),
tail: Atomic::new(0),
});
};
for s in self.iter() {
unsafe {
@ -469,39 +444,17 @@ macro_rules! impl_ {
};
}
impl<A> crate::i::Queue<A, usize, MultiCore> {
/// `spsc::Queue` `const` constructor; wrap the returned value in
/// [`spsc::Queue`](struct.Queue.html)
pub const fn new() -> Self {
crate::i::Queue::usize()
}
}
impl<T, N> Queue<T, N, usize, MultiCore>
where
N: ArrayLength<T>,
{
impl<T, const N: usize> Queue<T, usize, MultiCore, N> {
/// Alias for [`spsc::Queue::usize`](struct.Queue.html#method.usize)
pub fn new() -> Self {
Queue(crate::i::Queue::new())
pub const fn new() -> Self {
Queue::usize()
}
}
impl<A> crate::i::Queue<A, usize, SingleCore> {
/// `spsc::Queue` `const` constructor; wrap the returned value in
/// [`spsc::Queue`](struct.Queue.html)
pub const unsafe fn new_sc() -> Self {
crate::i::Queue::usize_sc()
}
}
impl<T, N> Queue<T, N, usize, SingleCore>
where
N: ArrayLength<T>,
{
impl<T, const N: usize> Queue<T, usize, SingleCore, N> {
/// Alias for [`spsc::Queue::usize_sc`](struct.Queue.html#method.usize_sc)
pub unsafe fn new_sc() -> Self {
Queue(crate::i::Queue::new_sc())
Queue::usize_sc()
}
}
@ -509,46 +462,42 @@ impl_!(u8, u8_sc);
impl_!(u16, u16_sc);
impl_!(usize, usize_sc);
impl<T, N, U, C, N2, U2, C2> PartialEq<Queue<T, N2, U2, C2>> for Queue<T, N, U, C>
impl<T, U, C, U2, C2, const N: usize, const N2: usize> PartialEq<Queue<T, U2, C2, N2>>
for Queue<T, U, C, N>
where
T: PartialEq,
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
N2: ArrayLength<T>,
U2: sealed::Uxx,
C2: sealed::XCore,
{
fn eq(&self, other: &Queue<T, N2, U2, C2>) -> bool {
fn eq(&self, other: &Queue<T, U2, C2, N2>) -> bool {
self.len_usize() == other.len_usize()
&& self.iter().zip(other.iter()).all(|(v1, v2)| v1 == v2)
}
}
impl<T, N, U, C> Eq for Queue<T, N, U, C>
impl<T, U, C, const N: usize> Eq for Queue<T, U, C, N>
where
T: Eq,
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
}
/// An iterator over the items of a queue
pub struct Iter<'a, T, N, U, C>
pub struct Iter<'a, T, U, C, const N: usize>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
rb: &'a Queue<T, N, U, C>,
rb: &'a Queue<T, U, C, N>,
index: usize,
len: usize,
}
impl<'a, T, N, U, C> Clone for Iter<'a, T, N, U, C>
impl<'a, T, U, C, const N: usize> Clone for Iter<'a, T, U, C, N>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
@ -562,22 +511,20 @@ where
}
/// A mutable iterator over the items of a queue
pub struct IterMut<'a, T, N, U, C>
pub struct IterMut<'a, T, U, C, const N: usize>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
rb: &'a mut Queue<T, N, U, C>,
rb: &'a mut Queue<T, U, C, N>,
index: usize,
len: usize,
}
macro_rules! iterator {
(struct $name:ident -> $elem:ty, $ptr:ty, $asptr:ident, $mkref:ident) => {
impl<'a, T, N, U, C> Iterator for $name<'a, T, N, U, C>
impl<'a, T, U, C, const N: usize> Iterator for $name<'a, T, U, C, N>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
@ -585,10 +532,10 @@ macro_rules! iterator {
fn next(&mut self) -> Option<$elem> {
if self.index < self.len {
let head = self.rb.0.head.load_relaxed().into();
let head = self.rb.head.load_relaxed().into();
let cap = self.rb.capacity().into();
let ptr = self.rb.0.buffer.$asptr() as $ptr;
let ptr = self.rb.buffer.$asptr() as $ptr;
let i = (head + self.index) % cap;
self.index += 1;
Some(unsafe { $mkref!(*ptr.offset(i as isize)) })
@ -598,18 +545,17 @@ macro_rules! iterator {
}
}
impl<'a, T, N, U, C> DoubleEndedIterator for $name<'a, T, N, U, C>
impl<'a, T, U, C, const N: usize> DoubleEndedIterator for $name<'a, T, U, C, N>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
fn next_back(&mut self) -> Option<$elem> {
if self.index < self.len {
let head = self.rb.0.head.load_relaxed().into();
let head = self.rb.head.load_relaxed().into();
let cap = self.rb.capacity().into();
let ptr = self.rb.0.buffer.$asptr() as $ptr;
let ptr = self.rb.buffer.$asptr() as $ptr;
// self.len > 0, since it's larger than self.index > 0
let i = (head + self.len - 1) % cap;
self.len -= 1;
@ -641,11 +587,21 @@ iterator!(struct IterMut -> &'a mut T, *mut T, as_mut_ptr, make_ref_mut);
mod tests {
use hash32::Hasher;
use crate::{consts::*, spsc::Queue};
use crate::spsc::{MultiCore, Queue, SingleCore};
#[test]
fn static_usize_sc() {
static mut _Q: Queue<i32, usize, SingleCore, 4> = unsafe { Queue::usize_sc() };
}
#[test]
fn static_usize() {
static mut _Q: Queue<i32, usize, MultiCore, 4> = Queue::usize();
}
#[test]
fn static_new() {
static mut _Q: Queue<i32, U4> = Queue(crate::i::Queue::new());
static mut _Q: Queue<i32, usize, MultiCore, 4> = Queue::new();
}
#[test]
@ -671,7 +627,7 @@ mod tests {
static mut COUNT: i32 = 0;
{
let mut v: Queue<Droppable, U4> = Queue::new();
let mut v: Queue<Droppable, usize, SingleCore, 4> = unsafe { Queue::usize_sc() };
v.enqueue(Droppable::new()).ok().unwrap();
v.enqueue(Droppable::new()).ok().unwrap();
v.dequeue().unwrap();
@ -680,7 +636,7 @@ mod tests {
assert_eq!(unsafe { COUNT }, 0);
{
let mut v: Queue<Droppable, U4> = Queue::new();
let mut v: Queue<Droppable, usize, MultiCore, 4> = Queue::usize();
v.enqueue(Droppable::new()).ok().unwrap();
v.enqueue(Droppable::new()).ok().unwrap();
}
@ -690,7 +646,7 @@ mod tests {
#[test]
fn full() {
let mut rb: Queue<i32, U4> = Queue::new();
let mut rb: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb.enqueue(0).unwrap();
rb.enqueue(1).unwrap();
@ -702,7 +658,7 @@ mod tests {
#[test]
fn iter() {
let mut rb: Queue<i32, U4> = Queue::new();
let mut rb: Queue<i32, u16, MultiCore, 4> = Queue::u16();
rb.enqueue(0).unwrap();
rb.enqueue(1).unwrap();
@ -718,7 +674,7 @@ mod tests {
#[test]
fn iter_double_ended() {
let mut rb: Queue<i32, U4> = Queue::new();
let mut rb: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb.enqueue(0).unwrap();
rb.enqueue(1).unwrap();
@ -735,7 +691,7 @@ mod tests {
#[test]
fn iter_overflow() {
let mut rb: Queue<i32, U4, u8> = Queue::u8();
let mut rb: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb.enqueue(0).unwrap();
for _ in 0..300 {
@ -749,7 +705,7 @@ mod tests {
#[test]
fn iter_mut() {
let mut rb: Queue<i32, U4> = Queue::new();
let mut rb: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb.enqueue(0).unwrap();
rb.enqueue(1).unwrap();
@ -765,7 +721,7 @@ mod tests {
#[test]
fn iter_mut_double_ended() {
let mut rb: Queue<i32, U4> = Queue::new();
let mut rb: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb.enqueue(0).unwrap();
rb.enqueue(1).unwrap();
@ -782,21 +738,17 @@ mod tests {
#[test]
fn sanity() {
let mut rb: Queue<i32, U4> = Queue::new();
let mut rb: Queue<i32, u8, MultiCore, 4> = Queue::u8();
assert_eq!(rb.dequeue(), None);
rb.enqueue(0).unwrap();
assert_eq!(rb.dequeue(), Some(0));
assert_eq!(rb.dequeue(), None);
}
#[test]
#[cfg(feature = "smaller-atomics")]
fn u8() {
let mut rb: Queue<u8, U256, _> = Queue::u8();
let mut rb: Queue<u8, u8, MultiCore, 256> = Queue::u8();
for _ in 0..255 {
rb.enqueue(0).unwrap();
@ -807,7 +759,7 @@ mod tests {
#[test]
fn wrap_around() {
let mut rb: Queue<i32, U3> = Queue::new();
let mut rb: Queue<i32, u8, MultiCore, 3> = Queue::u8();
rb.enqueue(0).unwrap();
rb.enqueue(1).unwrap();
@ -823,7 +775,7 @@ mod tests {
#[test]
fn ready_flag() {
let mut rb: Queue<i32, U2> = Queue::new();
let mut rb: Queue<i32, u8, MultiCore, 2> = Queue::u8();
let (mut p, mut c) = rb.split();
assert_eq!(c.ready(), false);
assert_eq!(p.ready(), true);
@ -851,7 +803,7 @@ mod tests {
#[test]
fn clone() {
let mut rb1: Queue<i32, U4> = Queue::new();
let mut rb1: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb1.enqueue(0).unwrap();
rb1.enqueue(0).unwrap();
rb1.dequeue().unwrap();
@ -866,12 +818,12 @@ mod tests {
fn eq() {
// generate two queues with same content
// but different buffer alignment
let mut rb1: Queue<i32, U4> = Queue::new();
let mut rb1: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb1.enqueue(0).unwrap();
rb1.enqueue(0).unwrap();
rb1.dequeue().unwrap();
rb1.enqueue(0).unwrap();
let mut rb2: Queue<i32, U4> = Queue::new();
let mut rb2: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb2.enqueue(0).unwrap();
rb2.enqueue(0).unwrap();
assert!(rb1 == rb2);
@ -892,7 +844,7 @@ mod tests {
// generate two queues with same content
// but different buffer alignment
let rb1 = {
let mut rb1: Queue<i32, U4> = Queue::new();
let mut rb1: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb1.enqueue(0).unwrap();
rb1.enqueue(0).unwrap();
rb1.dequeue().unwrap();
@ -900,7 +852,7 @@ mod tests {
rb1
};
let rb2 = {
let mut rb2: Queue<i32, U4> = Queue::new();
let mut rb2: Queue<i32, u8, MultiCore, 4> = Queue::u8();
rb2.enqueue(0).unwrap();
rb2.enqueue(0).unwrap();
rb2

View File

@ -1,20 +1,18 @@
use core::{marker::PhantomData, ptr::NonNull};
use generic_array::ArrayLength;
use crate::{
sealed::spsc as sealed,
spsc::{MultiCore, Queue},
spsc::Queue,
// spsc::{MultiCore, Queue}, // we cannot currently default to MultiCore
};
impl<T, N, U, C> Queue<T, N, U, C>
impl<T, U, C, const N: usize> Queue<T, U, C, N>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
/// Splits a statically allocated queue into producer and consumer end points
pub fn split<'rb>(&'rb mut self) -> (Producer<'rb, T, N, U, C>, Consumer<'rb, T, N, U, C>) {
pub fn split<'rb>(&'rb mut self) -> (Producer<'rb, T, U, C, N>, Consumer<'rb, T, U, C, N>) {
(
Producer {
rb: unsafe { NonNull::new_unchecked(self) },
@ -30,19 +28,17 @@ where
/// A queue "consumer"; it can dequeue items from the queue
// NOTE the consumer semantically owns the `head` pointer of the queue
pub struct Consumer<'a, T, N, U = usize, C = MultiCore>
pub struct Consumer<'a, T, U, C, const N: usize>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
rb: NonNull<Queue<T, N, U, C>>,
rb: NonNull<Queue<T, U, C, N>>,
_marker: PhantomData<&'a ()>,
}
unsafe impl<'a, T, N, U, C> Send for Consumer<'a, T, N, U, C>
unsafe impl<'a, T, U, C, const N: usize> Send for Consumer<'a, T, U, C, N>
where
N: ArrayLength<T>,
T: Send,
U: sealed::Uxx,
C: sealed::XCore,
@ -51,19 +47,17 @@ where
/// A queue "producer"; it can enqueue items into the queue
// NOTE the producer semantically owns the `tail` pointer of the queue
pub struct Producer<'a, T, N, U = usize, C = MultiCore>
pub struct Producer<'a, T, U, C, const N: usize>
where
N: ArrayLength<T>,
U: sealed::Uxx,
C: sealed::XCore,
{
rb: NonNull<Queue<T, N, U, C>>,
rb: NonNull<Queue<T, U, C, N>>,
_marker: PhantomData<&'a ()>,
}
unsafe impl<'a, T, N, U, C> Send for Producer<'a, T, N, U, C>
unsafe impl<'a, T, U, C, const N: usize> Send for Producer<'a, T, U, C, N>
where
N: ArrayLength<T>,
T: Send,
U: sealed::Uxx,
C: sealed::XCore,
@ -72,16 +66,15 @@ where
macro_rules! impl_ {
($uxx:ident) => {
impl<'a, T, N, C> Consumer<'a, T, N, $uxx, C>
impl<'a, T, C, const N: usize> Consumer<'a, T, $uxx, C, N>
where
N: ArrayLength<T>,
C: sealed::XCore,
{
/// Returns if there are any items to dequeue. When this returns true, at least the
/// first subsequent dequeue will succeed.
pub fn ready(&self) -> bool {
let head = unsafe { self.rb.as_ref().0.head.load_relaxed() };
let tail = unsafe { self.rb.as_ref().0.tail.load_acquire() }; // ▼
let head = unsafe { self.rb.as_ref().head.load_relaxed() };
let tail = unsafe { self.rb.as_ref().tail.load_acquire() }; // ▼
return head != tail;
}
@ -90,9 +83,8 @@ macro_rules! impl_ {
/// # Examples
/// ```
/// use heapless::spsc::Queue;
/// use heapless::consts::*;
///
/// let mut queue: Queue<u8, U235, _> = Queue::u8();
/// let mut queue: Queue<u8, _, _, 235> = Queue::u8();
/// let (mut producer, mut consumer) = queue.split();
/// assert_eq!(None, consumer.peek());
/// producer.enqueue(1);
@ -101,8 +93,8 @@ macro_rules! impl_ {
/// assert_eq!(None, consumer.peek());
/// ```
pub fn peek(&self) -> Option<&T> {
let head = unsafe { self.rb.as_ref().0.head.load_relaxed() };
let tail = unsafe { self.rb.as_ref().0.tail.load_acquire() };
let head = unsafe { self.rb.as_ref().head.load_relaxed() };
let tail = unsafe { self.rb.as_ref().tail.load_acquire() };
if head != tail {
Some(unsafe { self._peek(head) })
@ -113,8 +105,8 @@ macro_rules! impl_ {
/// Returns the item in the front of the queue, or `None` if the queue is empty
pub fn dequeue(&mut self) -> Option<T> {
let head = unsafe { self.rb.as_ref().0.head.load_relaxed() };
let tail = unsafe { self.rb.as_ref().0.tail.load_acquire() }; // ▼
let head = unsafe { self.rb.as_ref().head.load_relaxed() };
let tail = unsafe { self.rb.as_ref().tail.load_acquire() }; // ▼
if head != tail {
Some(unsafe { self._dequeue(head) }) // ▲
@ -129,8 +121,8 @@ macro_rules! impl_ {
///
/// If the queue is empty this is equivalent to calling `mem::uninitialized`
pub unsafe fn dequeue_unchecked(&mut self) -> T {
let head = self.rb.as_ref().0.head.load_relaxed();
debug_assert_ne!(head, self.rb.as_ref().0.tail.load_acquire());
let head = self.rb.as_ref().head.load_relaxed();
debug_assert_ne!(head, self.rb.as_ref().tail.load_acquire());
self._dequeue(head) // ▲
}
@ -139,7 +131,7 @@ macro_rules! impl_ {
let cap = rb.capacity();
let item = (rb.0.buffer.as_ptr() as *const T).add(usize::from(head % cap));
let item = (rb.buffer.as_ptr() as *const T).add(usize::from(head % cap));
&*item
}
@ -148,17 +140,16 @@ macro_rules! impl_ {
let cap = rb.capacity();
let item = (rb.0.buffer.as_ptr() as *const T)
let item = (rb.buffer.as_ptr() as *const T)
.add(usize::from(head % cap))
.read();
rb.0.head.store_release(head.wrapping_add(1)); // ▲
rb.head.store_release(head.wrapping_add(1)); // ▲
item
}
}
impl<'a, T, N, C> Producer<'a, T, N, $uxx, C>
impl<'a, T, C, const N: usize> Producer<'a, T, $uxx, C, N>
where
N: ArrayLength<T>,
C: sealed::XCore,
{
/// Returns if there is any space to enqueue a new item. When this returns true, at
@ -166,13 +157,13 @@ macro_rules! impl_ {
pub fn ready(&self) -> bool {
let cap = unsafe { self.rb.as_ref().capacity() };
let tail = unsafe { self.rb.as_ref().0.tail.load_relaxed() };
let tail = unsafe { self.rb.as_ref().tail.load_relaxed() };
// NOTE we could replace this `load_acquire` with a `load_relaxed` and this method
// would be sound on most architectures but that change would result in UB according
// to the C++ memory model, which is what Rust currently uses, so we err on the side
// of caution and stick to `load_acquire`. Check issue google#sanitizers#882 for
// more details.
let head = unsafe { self.rb.as_ref().0.head.load_acquire() };
let head = unsafe { self.rb.as_ref().head.load_acquire() };
return head.wrapping_add(cap) != tail;
}
@ -181,13 +172,13 @@ macro_rules! impl_ {
/// Returns back the `item` if the queue is full
pub fn enqueue(&mut self, item: T) -> Result<(), T> {
let cap = unsafe { self.rb.as_ref().capacity() };
let tail = unsafe { self.rb.as_ref().0.tail.load_relaxed() };
let tail = unsafe { self.rb.as_ref().tail.load_relaxed() };
// NOTE we could replace this `load_acquire` with a `load_relaxed` and this method
// would be sound on most architectures but that change would result in UB according
// to the C++ memory model, which is what Rust currently uses, so we err on the side
// of caution and stick to `load_acquire`. Check issue google#sanitizers#882 for
// more details.
let head = unsafe { self.rb.as_ref().0.head.load_acquire() }; // ▼
let head = unsafe { self.rb.as_ref().head.load_acquire() }; // ▼
if tail.wrapping_sub(head) > cap - 1 {
Err(item)
@ -206,8 +197,8 @@ macro_rules! impl_ {
/// to create a copy of `item`, which could result in `T`'s destructor running on `item`
/// twice.
pub unsafe fn enqueue_unchecked(&mut self, item: T) {
let tail = self.rb.as_ref().0.tail.load_relaxed();
debug_assert_ne!(tail.wrapping_add(1), self.rb.as_ref().0.head.load_acquire());
let tail = self.rb.as_ref().tail.load_relaxed();
debug_assert_ne!(tail.wrapping_add(1), self.rb.as_ref().head.load_acquire());
self._enqueue(tail, item); // ▲
}
@ -219,10 +210,10 @@ macro_rules! impl_ {
// NOTE(ptr::write) the memory slot that we are about to write to is
// uninitialized. We use `ptr::write` to avoid running `T`'s destructor on the
// uninitialized memory
(rb.0.buffer.as_mut_ptr() as *mut T)
(rb.buffer.as_mut_ptr() as *mut T)
.add(usize::from(tail % cap))
.write(item);
rb.0.tail.store_release(tail.wrapping_add(1)); // ▲
rb.tail.store_release(tail.wrapping_add(1)); // ▲
}
}
};
@ -234,11 +225,11 @@ impl_!(usize);
#[cfg(test)]
mod tests {
use crate::{consts::*, spsc::Queue};
use crate::spsc::{MultiCore, Queue};
#[test]
fn sanity() {
let mut rb: Queue<i32, U2> = Queue::new();
let mut rb: Queue<i32, u8, MultiCore, 2> = Queue::u8();
let (mut p, mut c) = rb.split();

View File

@ -7,32 +7,23 @@ use core::{
str::Utf8Error,
};
use generic_array::{
typenum::{consts::*, IsGreaterOrEqual},
ArrayLength, GenericArray,
};
use hash32;
use crate::Vec;
/// A fixed capacity [`String`](https://doc.rust-lang.org/std/string/struct.String.html)
pub struct String<N>(#[doc(hidden)] pub crate::i::String<GenericArray<u8, N>>)
where
N: ArrayLength<u8>;
impl<A> crate::i::String<A> {
/// `String` `const` constructor; wrap the returned value in [`String`](../struct.String.html)
pub const fn new() -> Self {
Self {
vec: crate::i::Vec::new(),
}
}
pub struct String<const N: usize> {
vec: Vec<u8, N>,
}
impl<N> String<N>
where
N: ArrayLength<u8>,
{
// impl<const N: usize> String<N> {
// /// `String` `const` constructor; wrap the returned value in [`String`](../struct.String.html)
// pub const fn new() -> Self {
// Self { vec: Vec::new() }
// }
// }
impl<const N: usize> String<N> {
/// Constructs a new, empty `String` with a fixed capacity of `N`
///
/// # Examples
@ -41,17 +32,16 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// // allocate the string on the stack
/// let mut s: String<U4> = String::new();
/// let mut s: String<4> = String::new();
///
/// // allocate the string in a static variable
/// static mut S: String<U4> = String(heapless::i::String::new());
/// static mut S: String<4> = String::new();
/// ```
#[inline]
pub fn new() -> Self {
String(crate::i::String::new())
pub const fn new() -> Self {
Self { vec: Vec::new() }
}
/// Converts a vector of bytes into a `String`.
@ -70,9 +60,8 @@ where
///
/// ```
/// use heapless::{String, Vec};
/// use heapless::consts::*;
///
/// let mut v: Vec<u8, U8> = Vec::new();
/// let mut v: Vec<u8, 8> = Vec::new();
/// v.push('a' as u8).unwrap();
/// v.push('b' as u8).unwrap();
///
@ -84,11 +73,10 @@ where
///
/// ```
/// use heapless::{String, Vec};
/// use heapless::consts::*;
///
/// // some invalid bytes, in a vector
///
/// let mut v: Vec<u8, U8> = Vec::new();
/// let mut v: Vec<u8, 8> = Vec::new();
/// v.push(0).unwrap();
/// v.push(159).unwrap();
/// v.push(146).unwrap();
@ -110,9 +98,9 @@ where
#[inline]
pub unsafe fn from_utf8_unchecked(mut vec: Vec<u8, N>) -> String<N> {
// FIXME this may result in a memcpy at runtime
let vec_ = mem::replace(&mut vec.0, MaybeUninit::uninit().assume_init());
let vec_ = mem::replace(&mut vec, MaybeUninit::uninit().assume_init());
mem::forget(vec);
String(crate::i::String { vec: vec_ })
String { vec: vec_ }
}
/// Converts a `String` into a byte vector.
@ -125,9 +113,8 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// let s: String<U4> = String::from("ab");
/// let s: String<4> = String::from("ab");
/// let b = s.into_bytes();
/// assert!(b.len() == 2);
///
@ -135,7 +122,7 @@ where
/// ```
#[inline]
pub fn into_bytes(self) -> Vec<u8, N> {
Vec(self.0.vec)
self.vec
}
/// Extracts a string slice containing the entire string.
@ -146,9 +133,8 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// let mut s: String<U4> = String::from("ab");
/// let mut s: String<4> = String::from("ab");
/// assert!(s.as_str() == "ab");
///
/// let _s = s.as_str();
@ -156,7 +142,7 @@ where
/// ```
#[inline]
pub fn as_str(&self) -> &str {
unsafe { str::from_utf8_unchecked(self.0.vec.as_slice()) }
unsafe { str::from_utf8_unchecked(self.vec.as_slice()) }
}
/// Converts a `String` into a mutable string slice.
@ -167,15 +153,14 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// let mut s: String<U4> = String::from("ab");
/// let mut s: String<4> = String::from("ab");
/// let s = s.as_mut_str();
/// s.make_ascii_uppercase();
/// ```
#[inline]
pub fn as_mut_str(&mut self) -> &mut str {
unsafe { str::from_utf8_unchecked_mut(self.0.vec.as_mut_slice()) }
unsafe { str::from_utf8_unchecked_mut(self.vec.as_mut_slice()) }
}
/// Returns a mutable reference to the contents of this `String`.
@ -203,7 +188,7 @@ where
/// assert_eq!(s, "olleh");
/// ```
pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<u8, N> {
&mut *(&mut self.0.vec as *mut crate::i::Vec<GenericArray<u8, N>> as *mut Vec<u8, N>)
&mut self.vec
}
/// Appends a given string slice onto the end of this `String`.
@ -214,9 +199,8 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// let mut s: String<U8> = String::from("foo");
/// let mut s: String<8> = String::from("foo");
///
/// assert!(s.push_str("bar").is_ok());
///
@ -226,7 +210,7 @@ where
/// ```
#[inline]
pub fn push_str(&mut self, string: &str) -> Result<(), ()> {
self.0.vec.extend_from_slice(string.as_bytes())
self.vec.extend_from_slice(string.as_bytes())
}
/// Returns the maximum number of elements the String can hold
@ -237,14 +221,13 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// let mut s: String<U4> = String::new();
/// let mut s: String<4> = String::new();
/// assert!(s.capacity() == 4);
/// ```
#[inline]
pub fn capacity(&self) -> usize {
self.0.vec.capacity()
self.vec.capacity()
}
/// Appends the given [`char`] to the end of this `String`.
@ -257,9 +240,8 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// let mut s: String<U8> = String::from("abc");
/// let mut s: String<8> = String::from("abc");
///
/// s.push('1').unwrap();
/// s.push('2').unwrap();
@ -272,9 +254,8 @@ where
#[inline]
pub fn push(&mut self, c: char) -> Result<(), ()> {
match c.len_utf8() {
1 => self.0.vec.push(c as u8).map_err(|_| {}),
1 => self.vec.push(c as u8).map_err(|_| {}),
_ => self
.0
.vec
.extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes()),
}
@ -300,9 +281,8 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// let mut s: String<U8> = String::from("hello");
/// let mut s: String<8> = String::from("hello");
///
/// s.truncate(2);
///
@ -312,7 +292,7 @@ where
pub fn truncate(&mut self, new_len: usize) {
if new_len <= self.len() {
assert!(self.is_char_boundary(new_len));
self.0.vec.truncate(new_len)
self.vec.truncate(new_len)
}
}
@ -328,9 +308,8 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// let mut s: String<U8> = String::from("foo");
/// let mut s: String<8> = String::from("foo");
///
/// assert_eq!(s.pop(), Some('o'));
/// assert_eq!(s.pop(), Some('o'));
@ -344,7 +323,7 @@ where
// pop bytes that correspond to `ch`
for _ in 0..ch.len_utf8() {
unsafe {
self.0.vec.pop_unchecked();
self.vec.pop_unchecked();
}
}
@ -362,9 +341,8 @@ where
///
/// ```
/// use heapless::String;
/// use heapless::consts::*;
///
/// let mut s: String<U8> = String::from("foo");
/// let mut s: String<8> = String::from("foo");
///
/// s.clear();
///
@ -374,23 +352,17 @@ where
/// ```
#[inline]
pub fn clear(&mut self) {
self.0.vec.clear()
self.vec.clear()
}
}
impl<N> Default for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> Default for String<N> {
fn default() -> Self {
Self::new()
}
}
impl<'a, N> From<&'a str> for String<N>
where
N: ArrayLength<u8>,
{
impl<'a, const N: usize> From<&'a str> for String<N> {
fn from(s: &'a str) -> Self {
let mut new = String::new();
new.push_str(s).unwrap();
@ -398,10 +370,7 @@ where
}
}
impl<N> str::FromStr for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> str::FromStr for String<N> {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
@ -411,59 +380,41 @@ where
}
}
impl<N> Clone for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> Clone for String<N> {
fn clone(&self) -> Self {
Self(crate::i::String {
vec: self.0.vec.clone(),
})
Self {
vec: self.vec.clone(),
}
}
}
impl<N> fmt::Debug for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> fmt::Debug for String<N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
<str as fmt::Debug>::fmt(self, f)
}
}
impl<N> fmt::Display for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> fmt::Display for String<N> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
<str as fmt::Display>::fmt(self, f)
}
}
impl<N> hash::Hash for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> hash::Hash for String<N> {
#[inline]
fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
<str as hash::Hash>::hash(self, hasher)
}
}
impl<N> hash32::Hash for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> hash32::Hash for String<N> {
#[inline]
fn hash<H: hash32::Hasher>(&self, hasher: &mut H) {
<str as hash32::Hash>::hash(self, hasher)
}
}
impl<N> fmt::Write for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> fmt::Write for String<N> {
fn write_str(&mut self, s: &str) -> Result<(), fmt::Error> {
self.push_str(s).map_err(|_| fmt::Error)
}
@ -473,10 +424,7 @@ where
}
}
impl<N> ops::Deref for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> ops::Deref for String<N> {
type Target = str;
fn deref(&self) -> &str {
@ -484,40 +432,27 @@ where
}
}
impl<N> ops::DerefMut for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> ops::DerefMut for String<N> {
fn deref_mut(&mut self) -> &mut str {
self.as_mut_str()
}
}
impl<N> AsRef<str> for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> AsRef<str> for String<N> {
#[inline]
fn as_ref(&self) -> &str {
self
}
}
impl<N> AsRef<[u8]> for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> AsRef<[u8]> for String<N> {
#[inline]
fn as_ref(&self) -> &[u8] {
self.as_bytes()
}
}
impl<N1, N2> PartialEq<String<N2>> for String<N1>
where
N1: ArrayLength<u8>,
N2: ArrayLength<u8>,
{
impl<const N1: usize, const N2: usize> PartialEq<String<N2>> for String<N1> {
fn eq(&self, rhs: &String<N2>) -> bool {
str::eq(&**self, &**rhs)
}
@ -527,49 +462,99 @@ where
}
}
macro_rules! impl_eq {
($lhs:ty, $rhs:ty) => {
impl<'a, 'b, N> PartialEq<$rhs> for $lhs
where
N: ArrayLength<u8>,
{
#[inline]
fn eq(&self, other: &$rhs) -> bool {
str::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &$rhs) -> bool {
str::ne(&self[..], &other[..])
}
}
// macro_rules! impl_eq {
// ($lhs:ty, $rhs:ty) => {
// impl<'a, 'b, N> PartialEq<$rhs> for $lhs
// where
// N: ArrayLength<u8>,
// {
// #[inline]
// fn eq(&self, other: &$rhs) -> bool {
// str::eq(&self[..], &other[..])
// }
// #[inline]
// fn ne(&self, other: &$rhs) -> bool {
// str::ne(&self[..], &other[..])
// }
// }
impl<'a, 'b, N> PartialEq<$lhs> for $rhs
where
N: ArrayLength<u8>,
{
#[inline]
fn eq(&self, other: &$lhs) -> bool {
str::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &$lhs) -> bool {
str::ne(&self[..], &other[..])
}
}
};
// impl<'a, 'b, N> PartialEq<$lhs> for $rhs
// where
// N: ArrayLength<u8>,
// {
// #[inline]
// fn eq(&self, other: &$lhs) -> bool {
// str::eq(&self[..], &other[..])
// }
// #[inline]
// fn ne(&self, other: &$lhs) -> bool {
// str::ne(&self[..], &other[..])
// }
// }
// };
// }
// String<N> == str
impl<const N: usize> PartialEq<str> for String<N> {
#[inline]
fn eq(&self, other: &str) -> bool {
str::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &str) -> bool {
str::ne(&self[..], &other[..])
}
}
impl_eq! { String<N>, str }
impl_eq! { String<N>, &'a str }
// String<N> == &'str
impl<const N: usize> PartialEq<&str> for String<N> {
#[inline]
fn eq(&self, other: &&str) -> bool {
str::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &&str) -> bool {
str::ne(&self[..], &other[..])
}
}
impl<N> Eq for String<N> where N: ArrayLength<u8> {}
// str == String<N>
impl<const N: usize> PartialEq<String<N>> for str {
#[inline]
fn eq(&self, other: &String<N>) -> bool {
str::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &String<N>) -> bool {
str::ne(&self[..], &other[..])
}
}
// &'str == String<N>
impl<const N: usize> PartialEq<String<N>> for &str {
#[inline]
fn eq(&self, other: &String<N>) -> bool {
str::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &String<N>) -> bool {
str::ne(&self[..], &other[..])
}
}
impl<const N: usize> Eq for String<N> {}
// impl<const N: usize, D: core::fmt::Display> From<D> for String<N> {
// fn from(s: D) -> Self {
// let mut new = String::new();
// write!(&mut new, "{}", s).unwrap();
// new
// }
// }
macro_rules! impl_from_num {
($num:ty, $size:ty) => {
impl<N> From<$num> for String<N>
where
N: ArrayLength<u8> + IsGreaterOrEqual<$size, Output = True>,
{
($num:ty, $size:expr) => {
impl<const N: usize> From<$num> for String<N> {
fn from(s: $num) -> Self {
let mut new = String::new();
write!(&mut new, "{}", s).unwrap();
@ -579,28 +564,28 @@ macro_rules! impl_from_num {
};
}
impl_from_num!(i8, U4);
impl_from_num!(i16, U6);
impl_from_num!(i32, U11);
impl_from_num!(i64, U20);
impl_from_num!(i8, 4);
impl_from_num!(i16, 6);
impl_from_num!(i32, 11);
impl_from_num!(i64, 20);
impl_from_num!(u8, U3);
impl_from_num!(u16, U5);
impl_from_num!(u32, U10);
impl_from_num!(u64, U20);
impl_from_num!(u8, 3);
impl_from_num!(u16, 5);
impl_from_num!(u32, 10);
impl_from_num!(u64, 20);
#[cfg(test)]
mod tests {
use crate::{consts::*, String, Vec};
use crate::{String, Vec};
#[test]
fn static_new() {
static mut _S: String<U8> = String(crate::i::String::new());
static mut _S: String<8> = String::new();
}
#[test]
fn clone() {
let s1: String<U20> = String::from("abcd");
let s1: String<20> = String::from("abcd");
let mut s2 = s1.clone();
s2.push_str(" efgh").unwrap();
@ -612,7 +597,7 @@ mod tests {
fn debug() {
use core::fmt::Write;
let s: String<U8> = String::from("abcd");
let s: String<8> = String::from("abcd");
let mut std_s = std::string::String::new();
write!(std_s, "{:?}", s).unwrap();
assert_eq!("\"abcd\"", std_s);
@ -622,7 +607,7 @@ mod tests {
fn display() {
use core::fmt::Write;
let s: String<U8> = String::from("abcd");
let s: String<8> = String::from("abcd");
let mut std_s = std::string::String::new();
write!(std_s, "{}", s).unwrap();
assert_eq!("abcd", std_s);
@ -630,7 +615,7 @@ mod tests {
#[test]
fn empty() {
let s: String<U4> = String::new();
let s: String<4> = String::new();
assert!(s.capacity() == 4);
assert_eq!(s, "");
assert_eq!(s.len(), 0);
@ -639,7 +624,7 @@ mod tests {
#[test]
fn from() {
let s: String<U4> = String::from("123");
let s: String<4> = String::from("123");
assert!(s.len() == 3);
assert_eq!(s, "123");
}
@ -648,23 +633,23 @@ mod tests {
fn from_str() {
use core::str::FromStr;
let s: String<U4> = String::<U4>::from_str("123").unwrap();
let s: String<4> = String::<4>::from_str("123").unwrap();
assert!(s.len() == 3);
assert_eq!(s, "123");
let e: () = String::<U2>::from_str("123").unwrap_err();
let e: () = String::<2>::from_str("123").unwrap_err();
assert_eq!(e, ());
}
#[test]
#[should_panic]
fn from_panic() {
let _: String<U4> = String::from("12345");
let _: String<4> = String::from("12345");
}
#[test]
fn from_utf8() {
let mut v: Vec<u8, U8> = Vec::new();
let mut v: Vec<u8, 8> = Vec::new();
v.push('a' as u8).unwrap();
v.push('b' as u8).unwrap();
@ -674,7 +659,7 @@ mod tests {
#[test]
fn from_utf8_uenc() {
let mut v: Vec<u8, U8> = Vec::new();
let mut v: Vec<u8, 8> = Vec::new();
v.push(240).unwrap();
v.push(159).unwrap();
v.push(146).unwrap();
@ -685,7 +670,7 @@ mod tests {
#[test]
fn from_utf8_uenc_err() {
let mut v: Vec<u8, U8> = Vec::new();
let mut v: Vec<u8, 8> = Vec::new();
v.push(0).unwrap();
v.push(159).unwrap();
v.push(146).unwrap();
@ -696,7 +681,7 @@ mod tests {
#[test]
fn from_utf8_unchecked() {
let mut v: Vec<u8, U8> = Vec::new();
let mut v: Vec<u8, 8> = Vec::new();
v.push(104).unwrap();
v.push(101).unwrap();
v.push(108).unwrap();
@ -710,22 +695,21 @@ mod tests {
#[test]
fn from_num() {
let v = String::<U20>::from(18446744073709551615 as u64);
let v: String<20> = String::from(18446744073709551615 as u64);
assert_eq!(v, "18446744073709551615");
}
#[test]
fn into_bytes() {
let s: String<U4> = String::from("ab");
let b: Vec<u8, U4> = s.into_bytes();
let s: String<4> = String::from("ab");
let b: Vec<u8, 4> = s.into_bytes();
assert_eq!(b.len(), 2);
assert_eq!(&['a' as u8, 'b' as u8], &b[..]);
}
#[test]
fn as_str() {
let s: String<U4> = String::from("ab");
let s: String<4> = String::from("ab");
assert_eq!(s.as_str(), "ab");
// should be moved to fail test
@ -735,7 +719,7 @@ mod tests {
#[test]
fn as_mut_str() {
let mut s: String<U4> = String::from("ab");
let mut s: String<4> = String::from("ab");
let s = s.as_mut_str();
s.make_ascii_uppercase();
assert_eq!(s, "AB");
@ -743,16 +727,18 @@ mod tests {
#[test]
fn push_str() {
let mut s: String<U8> = String::from("foo");
let mut s: String<8> = String::from("foo");
assert!(s.push_str("bar").is_ok());
assert_eq!("foobar", s);
assert_eq!(s, "foobar");
assert!(s.push_str("tender").is_err());
assert_eq!("foobar", s);
assert_eq!(s, "foobar");
}
#[test]
fn push() {
let mut s: String<U6> = String::from("abc");
let mut s: String<6> = String::from("abc");
assert!(s.push('1').is_ok());
assert!(s.push('2').is_ok());
assert!(s.push('3').is_ok());
@ -762,13 +748,13 @@ mod tests {
#[test]
fn as_bytes() {
let s: String<U8> = String::from("hello");
let s: String<8> = String::from("hello");
assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes());
}
#[test]
fn truncate() {
let mut s: String<U8> = String::from("hello");
let mut s: String<8> = String::from("hello");
s.truncate(6);
assert_eq!(s.len(), 5);
s.truncate(2);
@ -779,7 +765,7 @@ mod tests {
#[test]
fn pop() {
let mut s: String<U8> = String::from("foo");
let mut s: String<8> = String::from("foo");
assert_eq!(s.pop(), Some('o'));
assert_eq!(s.pop(), Some('o'));
assert_eq!(s.pop(), Some('f'));
@ -788,7 +774,7 @@ mod tests {
#[test]
fn pop_uenc() {
let mut s: String<U8> = String::from("");
let mut s: String<8> = String::from("");
assert_eq!(s.len(), 3);
match s.pop() {
Some(c) => {
@ -802,7 +788,7 @@ mod tests {
#[test]
fn is_empty() {
let mut v: String<U8> = String::new();
let mut v: String<8> = String::new();
assert!(v.is_empty());
let _ = v.push('a');
assert!(!v.is_empty());
@ -810,7 +796,7 @@ mod tests {
#[test]
fn clear() {
let mut s: String<U8> = String::from("foo");
let mut s: String<8> = String::from("foo");
s.clear();
assert!(s.is_empty());
assert_eq!(0, s.len());

View File

@ -1,25 +1,15 @@
use ufmt_write::uWrite;
use crate::{
ArrayLength,
string::String,
vec::Vec,
};
use crate::{string::String, vec::Vec};
impl<N> uWrite for String<N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> uWrite for String<N> {
type Error = ();
fn write_str(&mut self, s: &str) -> Result<(), Self::Error> {
self.push_str(s)
}
}
impl<N> uWrite for Vec<u8, N>
where
N: ArrayLength<u8>,
{
impl<const N: usize> uWrite for Vec<u8, N> {
type Error = ();
fn write_str(&mut self, s: &str) -> Result<(), Self::Error> {
self.extend_from_slice(s.as_bytes())
@ -32,8 +22,6 @@ mod tests {
use ufmt::{derive::uDebug, uwrite};
use crate::consts::*;
#[derive(uDebug)]
struct Pair {
x: u32,
@ -45,7 +33,7 @@ mod tests {
let a = 123;
let b = Pair { x: 0, y: 1234 };
let mut s = String::<U32>::new();
let mut s = String::<32>::new();
uwrite!(s, "{} -> {:?}", a, b).unwrap();
assert_eq!(s, "123 -> Pair { x: 0, y: 1234 }");
@ -54,7 +42,7 @@ mod tests {
#[test]
fn test_string_err() {
let p = Pair { x: 0, y: 1234 };
let mut s = String::<U4>::new();
let mut s = String::<4>::new();
assert!(uwrite!(s, "{:?}", p).is_err());
}
@ -63,7 +51,7 @@ mod tests {
let a = 123;
let b = Pair { x: 0, y: 1234 };
let mut v = Vec::<u8, U32>::new();
let mut v = Vec::<u8, 32>::new();
uwrite!(v, "{} -> {:?}", a, b).unwrap();
assert_eq!(v, b"123 -> Pair { x: 0, y: 1234 }");

File diff suppressed because it is too large Load Diff

View File

@ -1,10 +1,6 @@
//! Collections of `Send`-able things are `Send`
use heapless::{
consts,
spsc::{Consumer, Producer, Queue},
Vec, HistoryBuffer,
};
use heapless::Vec;
#[test]
fn send() {
@ -18,9 +14,9 @@ fn send() {
{
}
is_send::<Consumer<IsSend, consts::U4>>();
is_send::<Producer<IsSend, consts::U4>>();
is_send::<Queue<IsSend, consts::U4>>();
is_send::<Vec<IsSend, consts::U4>>();
is_send::<HistoryBuffer<IsSend, consts::U4>>();
// is_send::<Consumer<IsSend, consts::U4>>();
// is_send::<Producer<IsSend, consts::U4>>();
// is_send::<Queue<IsSend, consts::U4>>();
is_send::<Vec<IsSend, 4>>();
// is_send::<HistoryBuffer<IsSend, consts::U4>>();
}

View File

@ -1,265 +1,265 @@
#![deny(rust_2018_compatibility)]
#![deny(rust_2018_idioms)]
#![deny(warnings)]
// #![deny(rust_2018_compatibility)]
// #![deny(rust_2018_idioms)]
// #![deny(warnings)]
use std::{sync::mpsc, thread};
// use std::{sync::mpsc, thread};
use generic_array::typenum::Unsigned;
use heapless::{consts::*, mpmc::Q64, spsc};
use scoped_threadpool::Pool;
// use generic_array::typenum::Unsigned;
// use heapless::{consts::*, mpmc::Q64, spsc};
// use scoped_threadpool::Pool;
#[test]
fn once() {
static mut RB: spsc::Queue<i32, U4> = spsc::Queue(heapless::i::Queue::new());
// #[test]
// fn once() {
// static mut RB: spsc::Queue<i32, U4> = spsc::Queue(heapless::i::Queue::new());
let rb = unsafe { &mut RB };
// let rb = unsafe { &mut RB };
rb.enqueue(0).unwrap();
// rb.enqueue(0).unwrap();
let (mut p, mut c) = rb.split();
// let (mut p, mut c) = rb.split();
p.enqueue(1).unwrap();
// p.enqueue(1).unwrap();
thread::spawn(move || {
p.enqueue(1).unwrap();
});
// thread::spawn(move || {
// p.enqueue(1).unwrap();
// });
thread::spawn(move || {
c.dequeue().unwrap();
});
}
// thread::spawn(move || {
// c.dequeue().unwrap();
// });
// }
#[test]
fn twice() {
static mut RB: spsc::Queue<i32, U4> = spsc::Queue(heapless::i::Queue::new());
// #[test]
// fn twice() {
// static mut RB: spsc::Queue<i32, U4> = spsc::Queue(heapless::i::Queue::new());
let rb = unsafe { &mut RB };
// let rb = unsafe { &mut RB };
rb.enqueue(0).unwrap();
rb.enqueue(1).unwrap();
// rb.enqueue(0).unwrap();
// rb.enqueue(1).unwrap();
let (mut p, mut c) = rb.split();
// let (mut p, mut c) = rb.split();
thread::spawn(move || {
p.enqueue(2).unwrap();
p.enqueue(3).unwrap();
});
// thread::spawn(move || {
// p.enqueue(2).unwrap();
// p.enqueue(3).unwrap();
// });
thread::spawn(move || {
c.dequeue().unwrap();
c.dequeue().unwrap();
});
}
// thread::spawn(move || {
// c.dequeue().unwrap();
// c.dequeue().unwrap();
// });
// }
#[test]
fn scoped() {
let mut rb: spsc::Queue<i32, U4> = spsc::Queue::new();
// #[test]
// fn scoped() {
// let mut rb: spsc::Queue<i32, U4> = spsc::Queue::new();
rb.enqueue(0).unwrap();
// rb.enqueue(0).unwrap();
{
let (mut p, mut c) = rb.split();
// {
// let (mut p, mut c) = rb.split();
Pool::new(2).scoped(move |scope| {
scope.execute(move || {
p.enqueue(1).unwrap();
});
// Pool::new(2).scoped(move |scope| {
// scope.execute(move || {
// p.enqueue(1).unwrap();
// });
scope.execute(move || {
c.dequeue().unwrap();
});
});
}
// scope.execute(move || {
// c.dequeue().unwrap();
// });
// });
// }
rb.dequeue().unwrap();
}
// rb.dequeue().unwrap();
// }
#[test]
fn contention() {
type N = U1024;
// #[test]
// fn contention() {
// type N = U1024;
let mut rb: spsc::Queue<u8, N> = spsc::Queue::new();
// let mut rb: spsc::Queue<u8, N> = spsc::Queue::new();
{
let (mut p, mut c) = rb.split();
// {
// let (mut p, mut c) = rb.split();
Pool::new(2).scoped(move |scope| {
scope.execute(move || {
let mut sum: u32 = 0;
// Pool::new(2).scoped(move |scope| {
// scope.execute(move || {
// let mut sum: u32 = 0;
for i in 0..(2 * N::to_u32()) {
sum = sum.wrapping_add(i);
while let Err(_) = p.enqueue(i as u8) {}
}
// for i in 0..(2 * N::to_u32()) {
// sum = sum.wrapping_add(i);
// while let Err(_) = p.enqueue(i as u8) {}
// }
println!("producer: {}", sum);
});
// println!("producer: {}", sum);
// });
scope.execute(move || {
let mut sum: u32 = 0;
// scope.execute(move || {
// let mut sum: u32 = 0;
for _ in 0..(2 * N::to_u32()) {
loop {
match c.dequeue() {
Some(v) => {
sum = sum.wrapping_add(v as u32);
break;
}
_ => {}
}
}
}
// for _ in 0..(2 * N::to_u32()) {
// loop {
// match c.dequeue() {
// Some(v) => {
// sum = sum.wrapping_add(v as u32);
// break;
// }
// _ => {}
// }
// }
// }
println!("consumer: {}", sum);
});
});
}
// println!("consumer: {}", sum);
// });
// });
// }
assert!(rb.is_empty());
}
// assert!(rb.is_empty());
// }
#[test]
fn mpmc_contention() {
const N: u32 = 64;
// #[test]
// fn mpmc_contention() {
// const N: u32 = 64;
static Q: Q64<u32> = Q64::new();
// static Q: Q64<u32> = Q64::new();
let (s, r) = mpsc::channel();
Pool::new(2).scoped(|scope| {
let s1 = s.clone();
scope.execute(move || {
let mut sum: u32 = 0;
// let (s, r) = mpsc::channel();
// Pool::new(2).scoped(|scope| {
// let s1 = s.clone();
// scope.execute(move || {
// let mut sum: u32 = 0;
for i in 0..(16 * N) {
sum = sum.wrapping_add(i);
while let Err(_) = Q.enqueue(i) {}
}
// for i in 0..(16 * N) {
// sum = sum.wrapping_add(i);
// while let Err(_) = Q.enqueue(i) {}
// }
s1.send(sum).unwrap();
});
// s1.send(sum).unwrap();
// });
let s2 = s.clone();
scope.execute(move || {
let mut sum: u32 = 0;
// let s2 = s.clone();
// scope.execute(move || {
// let mut sum: u32 = 0;
for _ in 0..(16 * N) {
loop {
match Q.dequeue() {
Some(v) => {
sum = sum.wrapping_add(v);
break;
}
_ => {}
}
}
}
// for _ in 0..(16 * N) {
// loop {
// match Q.dequeue() {
// Some(v) => {
// sum = sum.wrapping_add(v);
// break;
// }
// _ => {}
// }
// }
// }
s2.send(sum).unwrap();
});
});
// s2.send(sum).unwrap();
// });
// });
assert_eq!(r.recv().unwrap(), r.recv().unwrap());
}
// assert_eq!(r.recv().unwrap(), r.recv().unwrap());
// }
#[test]
fn unchecked() {
type N = U1024;
// #[test]
// fn unchecked() {
// type N = U1024;
let mut rb: spsc::Queue<u8, N> = spsc::Queue::new();
// let mut rb: spsc::Queue<u8, N> = spsc::Queue::new();
for _ in 0..N::to_usize() / 2 {
rb.enqueue(1).unwrap();
}
// for _ in 0..N::to_usize() / 2 {
// rb.enqueue(1).unwrap();
// }
{
let (mut p, mut c) = rb.split();
// {
// let (mut p, mut c) = rb.split();
Pool::new(2).scoped(move |scope| {
scope.execute(move || {
for _ in 0..N::to_usize() / 2 {
unsafe {
p.enqueue_unchecked(2);
}
}
});
// Pool::new(2).scoped(move |scope| {
// scope.execute(move || {
// for _ in 0..N::to_usize() / 2 {
// unsafe {
// p.enqueue_unchecked(2);
// }
// }
// });
scope.execute(move || {
let mut sum: usize = 0;
// scope.execute(move || {
// let mut sum: usize = 0;
for _ in 0..N::to_usize() / 2 {
sum = sum.wrapping_add(usize::from(unsafe { c.dequeue_unchecked() }));
}
// for _ in 0..N::to_usize() / 2 {
// sum = sum.wrapping_add(usize::from(unsafe { c.dequeue_unchecked() }));
// }
assert_eq!(sum, N::to_usize() / 2);
});
});
}
// assert_eq!(sum, N::to_usize() / 2);
// });
// });
// }
assert_eq!(rb.len(), N::to_usize() / 2);
}
// assert_eq!(rb.len(), N::to_usize() / 2);
// }
#[test]
fn len_properly_wraps() {
type N = U3;
let mut rb: spsc::Queue<u8, N> = spsc::Queue::new();
// #[test]
// fn len_properly_wraps() {
// type N = U3;
// let mut rb: spsc::Queue<u8, N> = spsc::Queue::new();
rb.enqueue(1).unwrap();
assert_eq!(rb.len(), 1);
rb.dequeue();
assert_eq!(rb.len(), 0);
rb.enqueue(2).unwrap();
assert_eq!(rb.len(), 1);
rb.enqueue(3).unwrap();
assert_eq!(rb.len(), 2);
rb.enqueue(4).unwrap();
assert_eq!(rb.len(), 3);
}
// rb.enqueue(1).unwrap();
// assert_eq!(rb.len(), 1);
// rb.dequeue();
// assert_eq!(rb.len(), 0);
// rb.enqueue(2).unwrap();
// assert_eq!(rb.len(), 1);
// rb.enqueue(3).unwrap();
// assert_eq!(rb.len(), 2);
// rb.enqueue(4).unwrap();
// assert_eq!(rb.len(), 3);
// }
#[test]
fn iterator_properly_wraps() {
type N = U3;
let mut rb: spsc::Queue<u8, N> = spsc::Queue::new();
// #[test]
// fn iterator_properly_wraps() {
// type N = U3;
// let mut rb: spsc::Queue<u8, N> = spsc::Queue::new();
rb.enqueue(1).unwrap();
rb.dequeue();
rb.enqueue(2).unwrap();
rb.enqueue(3).unwrap();
rb.enqueue(4).unwrap();
let expected = [2, 3, 4];
let mut actual = [0, 0, 0];
for (idx, el) in rb.iter().enumerate() {
actual[idx] = *el;
}
assert_eq!(expected, actual)
}
// rb.enqueue(1).unwrap();
// rb.dequeue();
// rb.enqueue(2).unwrap();
// rb.enqueue(3).unwrap();
// rb.enqueue(4).unwrap();
// let expected = [2, 3, 4];
// let mut actual = [0, 0, 0];
// for (idx, el) in rb.iter().enumerate() {
// actual[idx] = *el;
// }
// assert_eq!(expected, actual)
// }
#[test]
fn pool() {
use heapless::pool::singleton::Pool as _;
// #[test]
// fn pool() {
// use heapless::pool::singleton::Pool as _;
static mut M: [u8; (N + 1) * 8] = [0; (N + 1) * 8];
const N: usize = 16 * 1024;
heapless::pool!(A: [u8; 8]);
// static mut M: [u8; (N + 1) * 8] = [0; (N + 1) * 8];
// const N: usize = 16 * 1024;
// heapless::pool!(A: [u8; 8]);
A::grow(unsafe { &mut M });
// A::grow(unsafe { &mut M });
Pool::new(2).scoped(move |scope| {
scope.execute(move || {
for _ in 0..N / 4 {
let a = A::alloc().unwrap();
let b = A::alloc().unwrap();
drop(a);
let b = b.init([1; 8]);
drop(b);
}
});
// Pool::new(2).scoped(move |scope| {
// scope.execute(move || {
// for _ in 0..N / 4 {
// let a = A::alloc().unwrap();
// let b = A::alloc().unwrap();
// drop(a);
// let b = b.init([1; 8]);
// drop(b);
// }
// });
scope.execute(move || {
for _ in 0..N / 2 {
let a = A::alloc().unwrap();
let a = a.init([2; 8]);
drop(a);
}
});
});
}
// scope.execute(move || {
// for _ in 0..N / 2 {
// let a = A::alloc().unwrap();
// let a = a.init([2; 8]);
// drop(a);
// }
// });
// });
// }