diff --git a/Cargo.toml b/Cargo.toml
index f74f9755..18d4cdd3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -19,9 +19,6 @@ name = "heapless"
repository = "https://github.com/japaric/heapless"
version = "0.5.0-alpha.1"
-[features]
-const-fn = []
-
[dev-dependencies]
scoped_threadpool = "0.1.8"
@@ -33,7 +30,4 @@ hash32 = "0.1.0"
[dependencies.serde]
version = "1"
optional = true
-default-features = false
-
-[package.metadata.docs.rs]
-features = ["const-fn"]
\ No newline at end of file
+default-features = false
\ No newline at end of file
diff --git a/ci/script.sh b/ci/script.sh
index 6665d22a..47772ea2 100644
--- a/ci/script.sh
+++ b/ci/script.sh
@@ -1,27 +1,20 @@
set -euxo pipefail
main() {
+ cargo check --target $TARGET
cargo check --target $TARGET --features 'serde'
- if [ $TRAVIS_RUST_VERSION = nightly ]; then
- cargo check --target $TARGET --features 'const-fn'
- fi
if [ $TARGET = x86_64-unknown-linux-gnu ]; then
cargo test --target $TARGET --features 'serde'
cargo test --target $TARGET --release --features 'serde'
if [ $TRAVIS_RUST_VERSION = nightly ]; then
- cargo test --target $TARGET --features 'const-fn'
- cargo test --target $TARGET --release --features 'const-fn'
-
export RUSTFLAGS="-Z sanitizer=thread"
export RUST_TEST_THREADS=1
export TSAN_OPTIONS="suppressions=$(pwd)/blacklist.txt"
cargo test --test tsan --target $TARGET
- cargo test --test tsan --target $TARGET --features 'const-fn'
cargo test --test tsan --target $TARGET --release
- cargo test --test tsan --target $TARGET --release --features 'const-fn'
fi
fi
}
diff --git a/src/binary_heap.rs b/src/binary_heap.rs
index 7870fed1..a1d96ff2 100644
--- a/src/binary_heap.rs
+++ b/src/binary_heap.rs
@@ -16,9 +16,7 @@ use core::{
ptr, slice,
};
-use generic_array::ArrayLength;
-
-use crate::Vec;
+use generic_array::{ArrayLength, GenericArray};
/// Min-heap
pub enum Min {}
@@ -46,6 +44,17 @@ unsafe impl Kind for Max {
}
}
+impl<A, K> crate::i::BinaryHeap<A, K> {
+ /// `BinaryHeap` `const` constructor; wrap the returned value in
+ /// [`BinaryHeap`](../struct.BinaryHeap.html)
+ pub const fn new() -> Self {
+ Self {
+ _kind: PhantomData,
+ data: crate::i::Vec::new(),
+ }
+ }
+}
+
/// A priority queue implemented with a binary heap.
///
/// This can be either a min-heap or a max-heap.
@@ -93,15 +102,13 @@ unsafe impl Kind for Max {
/// // The heap should now be empty.
/// assert!(heap.is_empty())
/// ```
-pub struct BinaryHeap<T, N, KIND>
+pub struct BinaryHeap<T, N, KIND>(
+ #[doc(hidden)] pub crate::i::BinaryHeap<GenericArray<T, N>, KIND>,
+)
where
T: Ord,
N: ArrayLength<T>,
- KIND: Kind,
-{
- _kind: PhantomData<KIND>,
- data: Vec<T, N>,
-}
+ KIND: Kind;
impl<T, N, K> BinaryHeap<T, N, K>
where
@@ -110,29 +117,27 @@ where
K: Kind,
{
/* Constructors */
-
- const_fn! {
- /// Creates an empty BinaryHeap as a $K-heap.
- ///
- /// ```
- /// use heapless::binary_heap::{BinaryHeap, Max};
- /// use heapless::consts::*;
- ///
- /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
- /// heap.push(4).unwrap();
- /// ```
- pub const fn new() -> Self {
- BinaryHeap {
- _kind: PhantomData,
- data: Vec::new(),
- }
- }
+ /// Creates an empty BinaryHeap as a $K-heap.
+ ///
+ /// ```
+ /// use heapless::binary_heap::{BinaryHeap, Max};
+ /// use heapless::consts::*;
+ ///
+ /// // allocate the binary heap on the stack
+ /// let mut heap: BinaryHeap<_, U8, Max> = BinaryHeap::new();
+ /// heap.push(4).unwrap();
+ ///
+ /// // allocate the binary heap in a static variable
+ /// static mut HEAP: BinaryHeap<i32, U8, Max> = BinaryHeap(heapless::i::BinaryHeap::new());
+ /// ```
+ pub fn new() -> Self {
+ BinaryHeap(crate::i::BinaryHeap::new())
}
/* Public API */
/// Returns the capacity of the binary heap.
pub fn capacity(&self) -> usize {
- self.data.capacity()
+ self.0.data.capacity()
}
/// Drops all items from the binary heap.
@@ -152,7 +157,7 @@ where
/// assert!(heap.is_empty());
/// ```
pub fn clear(&mut self) {
- self.data.clear()
+ self.0.data.clear()
}
/// Returns the length of the binary heap.
@@ -168,7 +173,7 @@ where
/// assert_eq!(heap.len(), 2);
/// ```
pub fn len(&self) -> usize {
- self.data.len()
+ self.0.data.len
}
/// Checks if the binary heap is empty.
@@ -210,7 +215,7 @@ where
/// }
/// ```
pub fn iter(&self) -> slice::Iter<'_, T> {
- self.data.iter()
+ self.0.data.as_slice().iter()
}
/// Returns a mutable iterator visiting all values in the underlying vector, in arbitrary order.
@@ -218,7 +223,7 @@ where
/// **WARNING** Mutating the items in the binary heap can leave the heap in an inconsistent
/// state.
pub fn iter_mut(&mut self) -> slice::IterMut<'_, T> {
- self.data.iter_mut()
+ self.0.data.as_mut_slice().iter_mut()
}
/// Returns the *top* (greatest if max-heap, smallest if min-heap) item in the binary heap, or
@@ -237,7 +242,7 @@ where
/// assert_eq!(heap.peek(), Some(&5));
/// ```
pub fn peek(&self) -> Option<&T> {
- self.data.get(0)
+ self.0.data.as_slice().get(0)
}
/// Removes the *top* (greatest if max-heap, smallest if min-heap) item from the binary heap and
@@ -266,10 +271,10 @@ where
/// Removes the *top* (greatest if max-heap, smallest if min-heap) item from the binary heap and
/// returns it, without checking if the binary heap is empty.
pub unsafe fn pop_unchecked(&mut self) -> T {
- let mut item = self.data.pop_unchecked();
+ let mut item = self.0.data.pop_unchecked();
if !self.is_empty() {
- mem::swap(&mut item, &mut self.data[0]);
+ mem::swap(&mut item, self.0.data.as_mut_slice().get_unchecked_mut(0));
self.sift_down_to_bottom(0);
}
item
@@ -290,7 +295,7 @@ where
/// assert_eq!(heap.peek(), Some(&5));
/// ```
pub fn push(&mut self, item: T) -> Result<(), T> {
- if self.data.is_full() {
+ if self.0.data.is_full() {
return Err(item);
}
@@ -301,7 +306,7 @@ where
/// Pushes an item onto the binary heap without first checking if it's full.
pub unsafe fn push_unchecked(&mut self, item: T) {
let old_len = self.len();
- self.data.push_unchecked(item);
+ self.0.data.push_unchecked(item);
self.sift_up(0, old_len);
}
@@ -310,7 +315,7 @@ where
let end = self.len();
let start = pos;
unsafe {
- let mut hole = Hole::new(&mut self.data, pos);
+ let mut hole = Hole::new(self.0.data.as_mut_slice(), pos);
let mut child = 2 * pos + 1;
while child < end {
let right = child + 1;
@@ -329,7 +334,7 @@ where
fn sift_up(&mut self, start: usize, pos: usize) -> usize {
unsafe {
// Take out the value at `pos` and create a hole.
- let mut hole = Hole::new(&mut self.data, pos);
+ let mut hole = Hole::new(self.0.data.as_mut_slice(), pos);
while hole.pos() > start {
let parent = (hole.pos() - 1) / 2;
@@ -433,10 +438,21 @@ where
T: Ord + Clone,
{
fn clone(&self) -> Self {
- Self {
- _kind: self._kind,
- data: self.data.clone(),
- }
+ BinaryHeap(crate::i::BinaryHeap {
+ _kind: self.0._kind,
+ data: self.0.data.clone(),
+ })
+ }
+}
+
+impl<T, N, K> Drop for BinaryHeap<T, N, K>
+where
+ N: ArrayLength<T>,
+ K: Kind,
+ T: Ord,
+{
+ fn drop(&mut self) {
+ unsafe { ptr::drop_in_place(self.0.data.as_mut_slice()) }
}
}
@@ -474,10 +490,9 @@ mod tests {
consts::*,
};
- #[cfg(feature = "const-fn")]
#[test]
fn static_new() {
- static mut _B: BinaryHeap<i32, U8, Max> = BinaryHeap::new();
+ static mut _B: BinaryHeap<i32, U8, Max> = BinaryHeap(crate::i::BinaryHeap::new());
}
#[test]
diff --git a/src/const_fn.rs b/src/const_fn.rs
deleted file mode 100644
index e4286c64..00000000
--- a/src/const_fn.rs
+++ /dev/null
@@ -1,35 +0,0 @@
-// Make functions `const` if the `const-fn` feature is active.
-// The meta attributes are in place to keep doc comments with the functions.
-// The function definition incl. annotations and doc comments must be enclodes
-// by the marco invocation.
-macro_rules! const_fn {
- ($(#[$attr:meta])* pub const unsafe fn $($f:tt)*) => (
-
- $(#[$attr])*
- #[cfg(feature = "const-fn")]
- pub const unsafe fn $($f)*
-
- $(#[$attr])*
- #[cfg(not(feature = "const-fn"))]
- pub unsafe fn $($f)*
- );
- ($(#[$attr:meta])* pub const fn $($f:tt)*) => (
-
- $(#[$attr])*
- #[cfg(feature = "const-fn")]
- pub const fn $($f)*
-
- $(#[$attr])*
- #[cfg(not(feature = "const-fn"))]
- pub fn $($f)*
- );
- ($(#[$attr:meta])* const fn $($f:tt)*) => (
- $(#[$attr])*
- #[cfg(feature = "const-fn")]
- const fn $($f)*
-
- $(#[$attr])*
- #[cfg(not(feature = "const-fn"))]
- fn $($f)*
- );
-}
diff --git a/src/i.rs b/src/i.rs
new file mode 100644
index 00000000..f870a823
--- /dev/null
+++ b/src/i.rs
@@ -0,0 +1,38 @@
+//! Unfortunate implementation detail required to construct `heapless` types in const context
+
+use core::{marker::PhantomData, mem::MaybeUninit};
+
+use crate::spsc::{Atomic, MultiCore};
+
+/// `const-fn` version of [`BinaryHeap`](../binary_heap/struct.BinaryHeap.html)
+pub struct BinaryHeap<A, K> {
+ pub(crate) _kind: PhantomData<K>,
+ pub(crate) data: Vec<A>,
+}
+
+/// `const-fn` version of [`LinearMap`](../struct.LinearMap.html)
+pub struct LinearMap<A> {
+ pub(crate) buffer: Vec<A>,
+}
+
+/// `const-fn` version of [`spsc::Queue`](../spsc/struct.Queue.html)
+pub struct Queue<A, U = usize, C = MultiCore> {
+ // this is from where we dequeue items
+ pub(crate) head: Atomic<U, C>,
+
+ // this is where we enqueue new items
+ pub(crate) tail: Atomic<U, C>,
+
+ pub(crate) buffer: MaybeUninit<A>,
+}
+
+/// `const-fn` version of [`String`](../struct.String.html)
+pub struct String<A> {
+ pub(crate) vec: Vec<A>,
+}
+
+/// `const-fn` version of [`Vec`](../struct.Vec.html)
+pub struct Vec<A> {
+ pub(crate) buffer: MaybeUninit<A>,
+ pub(crate) len: usize,
+}
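
Note: the `i` module above gives each collection a `const fn` core that the public tuple-struct wrappers re-export. A minimal sketch of the resulting initialization pattern (the element types and capacities below are illustrative assumptions, not taken from this diff):

    use heapless::{consts::*, String, Vec};

    // `i::Vec::new()` and `i::String::new()` are `const fn`, so the wrappers can live in
    // `static` variables without the old `Option` + runtime-initialization workaround.
    static mut LOG: Vec<u8, U32> = Vec(heapless::i::Vec::new());
    static mut NAME: String<U16> = String(heapless::i::String::new());

    fn record(byte: u8) {
        // NOTE(unsafe) this sketch assumes a single execution context touches `LOG`
        unsafe {
            LOG.push(byte).ok();
        }
    }
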
diff --git a/src/lib.rs b/src/lib.rs
index eeef37fe..bedf8f9d 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -18,12 +18,11 @@
//! assert_eq!(xs.pop(), Some(42));
//!
//! // in a `static` variable
-//! // static mut XS: Vec<u8, U8> = Vec::new(); // requires feature `const-fn`
+//! // (because `const-fn` has not been fully stabilized, you need to use the helper structs in
+//! // the `i` module, which must be wrapped in a tuple struct)
+//! static mut XS: Vec<u8, U8> = Vec(heapless::i::Vec::new());
//!
-//! // work around
-//! static mut XS: Option<Vec<u8, U8>> = None;
-//! unsafe { XS = Some(Vec::new()) };
-//! let xs = unsafe { XS.as_mut().unwrap() };
+//! let xs = unsafe { &mut XS };
//!
//! xs.push(42);
//! assert_eq!(xs.pop(), Some(42));
@@ -40,10 +39,10 @@
//! (which is bad / unacceptable for hard real time applications).
//!
//! `heapless` data structures don't use a memory allocator which means no risk of an uncatchable
-//! Out Of Memory (OOM) condition (which defaults to abort) while performing operations
-//! on them. It's certainly possible to run out of capacity while growing `heapless` data
-//! structures, but the API lets you handle this possibility by returning a `Result` on operations
-//! that may exhaust the capacity of the data structure.
+//! Out Of Memory (OOM) condition while performing operations on them. It's certainly possible to
+//! run out of capacity while growing `heapless` data structures, but the API lets you handle this
+//! possibility by returning a `Result` on operations that may exhaust the capacity of the data
+//! structure.
//!
//! List of currently implemented data structures:
//!
@@ -59,28 +58,7 @@
//!
//! This crate is guaranteed to compile on stable Rust 1.36 and up with its default set of features.
//! It *might* compile on older versions but that may change in any new patch release.
-//!
-//! # Cargo features
-//!
-//! In order to target the Rust stable toolchain, there are some opt-in Cargo features. The features
-//! need to be enabled in `Cargo.toml` in order to use them. Once the underlying features in Rust
-//! are stable, these feature gates may be activated by default.
-//!
-//! Example of `Cargo.toml`:
-//!
-//! ``` text
-//! # ..
-//! [dependencies]
-//! heapless = { version = "0.4.0", features = ["const-fn"] }
-//! # ..
-//! ```
-//!
-//! Currently the following features are available and not active by default:
-//!
-//! - `"const-fn"` -- Enables the nightly `const_fn` and `untagged_unions` features and makes most
-//! `new` methods `const`. This way they can be used to initialize static memory at compile time.
-#![cfg_attr(feature = "const-fn", feature(const_fn))]
#![cfg_attr(not(test), no_std)]
#![deny(missing_docs)]
#![deny(rust_2018_compatibility)]
@@ -88,9 +66,6 @@
#![deny(warnings)]
#![feature(maybe_uninit)]
-#[macro_use]
-mod const_fn;
-
pub use binary_heap::BinaryHeap;
pub use generic_array::typenum::consts;
pub use generic_array::ArrayLength;
@@ -113,6 +88,7 @@ mod de;
mod ser;
pub mod binary_heap;
+pub mod i;
#[cfg(not(armv6m))]
pub mod pool;
pub mod spsc;
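
Note: as the reworded `lib.rs` docs stress, running out of capacity is surfaced through `Result` rather than an uncatchable OOM abort. A short sketch of that error-handling style (the capacity `U4` and the error type are assumptions chosen for illustration):

    use heapless::{consts::*, Vec};

    fn collect_samples(samples: &[u32]) -> Result<Vec<u32, U4>, ()> {
        let mut out: Vec<u32, U4> = Vec::new();
        for &sample in samples {
            // `push` hands the item back in `Err` once the fixed capacity is exhausted,
            // so the caller decides what running out of space means.
            out.push(sample).map_err(drop)?;
        }
        Ok(out)
    }
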
diff --git a/src/linear_map.rs b/src/linear_map.rs
index 24b9d32d..9dd4d74c 100644
--- a/src/linear_map.rs
+++ b/src/linear_map.rs
@@ -1,18 +1,25 @@
-use core::{borrow::Borrow, fmt, iter::FromIterator, mem, ops, slice};
+use core::{borrow::Borrow, fmt, iter::FromIterator, mem, ops, ptr, slice};
-use generic_array::ArrayLength;
+use generic_array::{ArrayLength, GenericArray};
use crate::Vec;
/// A fixed capacity map / dictionary that performs lookups via linear search
///
/// Note that as this map doesn't use hashing so most operations are **O(N)** instead of O(1)
-pub struct LinearMap<K, V, N>
+pub struct LinearMap<K, V, N>(#[doc(hidden)] pub crate::i::LinearMap<GenericArray<(K, V), N>>)
where
N: ArrayLength<(K, V)>,
- K: Eq,
-{
- buffer: Vec<(K, V), N>,
+ K: Eq;
+
+impl<A> crate::i::LinearMap<A> {
+ /// `LinearMap` `const` constructor; wrap the returned value in
+ /// [`LinearMap`](../struct.LinearMap.html)
+ pub const fn new() -> Self {
+ Self {
+ buffer: crate::i::Vec::new(),
+ }
+ }
}
impl<K, V, N> LinearMap<K, V, N>
@@ -20,20 +27,22 @@ where
N: ArrayLength<(K, V)>,
K: Eq,
{
- const_fn! {
- /// Creates an empty `LinearMap`
- ///
- /// # Examples
- ///
- /// ```
- /// use heapless::LinearMap;
- /// use heapless::consts::*;
- ///
- /// let mut map: LinearMap<&str, isize, U8> = LinearMap::new();
- /// ```
- pub const fn new() -> Self {
- LinearMap { buffer: Vec::new() }
- }
+ /// Creates an empty `LinearMap`
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use heapless::LinearMap;
+ /// use heapless::consts::*;
+ ///
+ /// // allocate the map on the stack
+ /// let mut map: LinearMap<&str, isize, U8> = LinearMap::new();
+ ///
+ /// // allocate the map in a static variable
+ /// static mut MAP: LinearMap<&str, isize, U8> = LinearMap(heapless::i::LinearMap::new());
+ /// ```
+ pub fn new() -> Self {
+ LinearMap(crate::i::LinearMap::new())
}
/// Returns the number of elements that the map can hold
@@ -46,11 +55,11 @@ where
/// use heapless::LinearMap;
/// use heapless::consts::*;
///
- /// let mut map: LinearMap<&str, isize, U8> = LinearMap::new();
+ /// let map: LinearMap<&str, isize, U8> = LinearMap::new();
/// assert_eq!(map.capacity(), 8);
/// ```
- pub fn capacity(&mut self) -> usize {
- self.buffer.capacity()
+ pub fn capacity(&self) -> usize {
+ N::to_usize()
}
/// Clears the map, removing all key-value pairs
@@ -69,7 +78,7 @@ where
/// assert!(map.is_empty());
/// ```
pub fn clear(&mut self) {
- self.buffer.clear()
+ self.0.buffer.clear()
}
/// Returns true if the map contains a value for the specified key.
@@ -159,7 +168,7 @@ where
/// assert_eq!(a.len(), 1);
/// ```
pub fn len(&self) -> usize {
- self.buffer.len()
+ self.0.buffer.len
}
/// Inserts a key-value pair into the map.
@@ -190,7 +199,7 @@ where
return Ok(Some(value));
}
- self.buffer.push((key, value))?;
+ self.0.buffer.push((key, value))?;
Ok(None)
}
@@ -232,7 +241,7 @@ where
/// ```
pub fn iter(&self) -> Iter<'_, K, V> {
Iter {
- iter: self.buffer.iter(),
+ iter: self.0.buffer.as_slice().iter(),
}
}
@@ -261,7 +270,7 @@ where
/// ```
pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
IterMut {
- iter: self.buffer.iter_mut(),
+ iter: self.0.buffer.as_mut_slice().iter_mut(),
}
}
@@ -313,7 +322,7 @@ where
.find(|&(_, k)| k.borrow() == key)
.map(|(idx, _)| idx);
- idx.map(|idx| self.buffer.swap_remove(idx).1)
+ idx.map(|idx| self.0.buffer.swap_remove(idx).1)
}
/// An iterator visiting all values in arbitrary order
@@ -404,9 +413,9 @@ where
V: Clone,
{
fn clone(&self) -> Self {
- Self {
- buffer: self.buffer.clone(),
- }
+ Self(crate::i::LinearMap {
+ buffer: self.0.buffer.clone(),
+ })
}
}
@@ -431,7 +440,7 @@ where
I: IntoIterator<Item = (K, V)>,
{
let mut out = Self::new();
- out.buffer.extend(iter);
+ out.0.buffer.extend(iter);
out
}
}
@@ -463,9 +472,13 @@ where
type Item = (K, V);
type IntoIter = IntoIter<K, V, N>;
- fn into_iter(self) -> Self::IntoIter {
+ fn into_iter(mut self) -> Self::IntoIter {
+ // FIXME this may result in a memcpy at runtime
+ let lm = mem::replace(&mut self.0, unsafe { mem::uninitialized() });
+ mem::forget(self);
+
Self::IntoIter {
- inner: self.buffer.into_iter(),
+ inner: crate::Vec(lm.buffer).into_iter(),
}
}
}
@@ -503,6 +516,16 @@ impl<'a, K, V> Clone for Iter<'a, K, V> {
}
}
+impl<K, V, N> Drop for LinearMap<K, V, N>
+where
+ N: ArrayLength<(K, V)>,
+ K: Eq,
+{
+ fn drop(&mut self) {
+ unsafe { ptr::drop_in_place(self.0.buffer.as_mut_slice()) }
+ }
+}
+
pub struct IterMut<'a, K, V> {
iter: slice::IterMut<'a, (K, V)>,
}
@@ -542,10 +565,9 @@ where
mod test {
use crate::{consts::*, LinearMap};
- #[cfg(feature = "const-fn")]
#[test]
fn static_new() {
- static mut _L: LinearMap<i32, i32, U8> = LinearMap::new();
+ static mut _L: LinearMap<i32, i32, U8> = LinearMap(crate::i::LinearMap::new());
}
#[test]
diff --git a/src/spsc/mod.rs b/src/spsc/mod.rs
index fb7e4278..4b2aa0cf 100644
--- a/src/spsc/mod.rs
+++ b/src/spsc/mod.rs
@@ -18,29 +18,25 @@ pub struct SingleCore;
// Atomic{U8,U16, Usize} with no CAS operations that works on targets that have "no atomic support"
// according to their specification
-struct Atomic<U, C>
-where
- U: sealed::Uxx,
- C: sealed::XCore,
-{
+pub(crate) struct Atomic<U, C> {
v: UnsafeCell<U>,
c: PhantomData<C>,
}
+impl<U, C> Atomic<U, C> {
+ pub(crate) const fn new(v: U) -> Self {
+ Atomic {
+ v: UnsafeCell::new(v),
+ c: PhantomData,
+ }
+ }
+}
+
impl<U, C> Atomic<U, C>
where
U: sealed::Uxx,
C: sealed::XCore,
{
- const_fn! {
- const fn new(v: U) -> Self {
- Atomic {
- v: UnsafeCell::new(v),
- c: PhantomData,
- }
- }
- }
-
fn get_mut(&mut self) -> &mut U {
unsafe { &mut *self.v.get() }
}
@@ -102,16 +98,13 @@ where
/// use heapless::spsc::Queue;
/// use heapless::consts::*;
///
-/// // static mut RB: Queue<Event, U4> = Queue::new(); // requires feature `const-fn`
-///
-/// static mut RB: Option<Queue<Event, U4>> = None;
+/// static mut RB: Queue<Event, U4> = Queue(heapless::i::Queue::new());
///
/// enum Event { A, B }
///
/// fn main() {
-/// unsafe { RB = Some(Queue::new()) };
/// // NOTE(unsafe) beware of aliasing the `consumer` end point
-/// let mut consumer = unsafe { RB.as_mut().unwrap().split().1 };
+/// let mut consumer = unsafe { RB.split().1 };
///
/// loop {
/// // `dequeue` is a lockless operation
@@ -127,7 +120,7 @@ where
/// // this is a different execution context that can preempt `main`
/// fn interrupt_handler() {
/// // NOTE(unsafe) beware of aliasing the `producer` end point
-/// let mut producer = unsafe { RB.as_mut().unwrap().split().0 };
+/// let mut producer = unsafe { RB.split().0 };
/// # let condition = true;
///
/// // ..
@@ -141,20 +134,13 @@ where
/// // ..
/// }
/// ```
-pub struct Queue<T, N, U = usize, C = MultiCore>
+pub struct Queue<T, N, U = usize, C = MultiCore>(
+ #[doc(hidden)] pub crate::i::Queue<GenericArray<T, N>, U, C>,
+)
where
N: ArrayLength<T>,
U: sealed::Uxx,
- C: sealed::XCore,
-{
- // this is from where we dequeue items
- head: Atomic<U, C>,
-
- // this is where we enqueue new items
- tail: Atomic<U, C>,
-
- buffer: MaybeUninit<GenericArray<T, N>>,
-}
+ C: sealed::XCore;
impl<T, N, U, C> Queue<T, N, U, C>
where
@@ -192,8 +178,8 @@ where
}
fn len_usize(&self) -> usize {
- let head = self.head.load_relaxed().into();
- let tail = self.tail.load_relaxed().into();
+ let head = self.0.head.load_relaxed().into();
+ let tail = self.0.tail.load_relaxed().into();
tail.wrapping_sub(head)
}
@@ -290,14 +276,20 @@ macro_rules! impl_ {
where
N: ArrayLength<T>,
{
- const_fn! {
- /// Creates an empty queue with a fixed capacity of `N`
- pub const fn $uxx() -> Self {
- Queue {
- buffer: MaybeUninit::uninit(),
- head: Atomic::new(0),
- tail: Atomic::new(0),
- }
+ /// Creates an empty queue with a fixed capacity of `N`
+ pub fn $uxx() -> Self {
+ Queue(crate::i::Queue::$uxx())
+ }
+ }
+
+ impl<A> crate::i::Queue<A, $uxx, MultiCore> {
+ /// `spsc::Queue` `const` constructor; wrap the returned value in
+ /// [`spsc::Queue`](struct.Queue.html)
+ pub const fn $uxx() -> Self {
+ crate::i::Queue {
+ buffer: MaybeUninit::uninit(),
+ head: Atomic::new(0),
+ tail: Atomic::new(0),
}
}
}
@@ -306,14 +298,20 @@ macro_rules! impl_ {
where
N: ArrayLength<T>,
{
- const_fn! {
- /// Creates an empty queue with a fixed capacity of `N` (single core variant)
- pub const unsafe fn $uxx_sc() -> Self {
- Queue {
- buffer: MaybeUninit::uninit(),
- head: Atomic::new(0),
- tail: Atomic::new(0),
- }
+ /// Creates an empty queue with a fixed capacity of `N` (single core variant)
+ pub unsafe fn $uxx_sc() -> Self {
+ Queue(crate::i::Queue::$uxx_sc())
+ }
+ }
+
+ impl<A> crate::i::Queue<A, $uxx, SingleCore> {
+ /// `spsc::Queue` `const` constructor; wrap the returned value in
+ /// [`spsc::Queue`](struct.Queue.html)
+ pub const unsafe fn $uxx_sc() -> Self {
+ crate::i::Queue {
+ buffer: MaybeUninit::uninit(),
+ head: Atomic::new(0),
+ tail: Atomic::new(0),
}
}
}
@@ -327,10 +325,10 @@ macro_rules! impl_ {
pub fn dequeue(&mut self) -> Option<T> {
let cap = self.capacity();
- let head = self.head.get_mut();
- let tail = self.tail.get_mut();
+ let head = self.0.head.get_mut();
+ let tail = self.0.tail.get_mut();
- let p = self.buffer.as_ptr();
+ let p = self.0.buffer.as_ptr();
if *head != *tail {
let item = unsafe { (p as *const T).add(usize::from(*head % cap)).read() };
@@ -346,8 +344,8 @@ macro_rules! impl_ {
/// Returns back the `item` if the queue is full
pub fn enqueue(&mut self, item: T) -> Result<(), T> {
let cap = self.capacity();
- let head = *self.head.get_mut();
- let tail = *self.tail.get_mut();
+ let head = *self.0.head.get_mut();
+ let tail = *self.0.tail.get_mut();
if tail.wrapping_sub(head) > cap - 1 {
Err(item)
@@ -367,12 +365,12 @@ macro_rules! impl_ {
/// twice.
pub unsafe fn enqueue_unchecked(&mut self, item: T) {
let cap = self.capacity();
- let tail = self.tail.get_mut();
+ let tail = self.0.tail.get_mut();
// NOTE(ptr::write) the memory slot that we are about to write to is
// uninitialized. We use `ptr::write` to avoid running `T`'s destructor on the
// uninitialized memory
- (self.buffer.as_mut_ptr() as *mut T)
+ (self.0.buffer.as_mut_ptr() as *mut T)
.add(usize::from(*tail % cap))
.write(item);
*tail = tail.wrapping_add(1);
@@ -380,8 +378,8 @@ macro_rules! impl_ {
/// Returns the number of elements in the queue
pub fn len(&self) -> $uxx {
- let head = self.head.load_relaxed();
- let tail = self.tail.load_relaxed();
+ let head = self.0.head.load_relaxed();
+ let tail = self.0.tail.load_relaxed();
if head > tail {
tail.wrapping_sub(head)
@@ -398,11 +396,12 @@ macro_rules! impl_ {
C: sealed::XCore,
{
fn clone(&self) -> Self {
- let mut new: Queue<T, N, $uxx, C> = Queue {
+ let mut new: Queue<T, N, $uxx, C> = Queue(crate::i::Queue {
buffer: MaybeUninit::uninit(),
head: Atomic::new(0),
tail: Atomic::new(0),
- };
+ });
+
for s in self.iter() {
unsafe {
// NOTE(unsafe) new.capacity() == self.capacity() <= self.len()
@@ -416,15 +415,29 @@ macro_rules! impl_ {
};
}
+impl<A> crate::i::Queue<A, usize, MultiCore> {
+ /// `spsc::Queue` `const` constructor; wrap the returned value in
+ /// [`spsc::Queue`](struct.Queue.html)
+ pub const fn new() -> Self {
+ crate::i::Queue::usize()
+ }
+}
+
impl<T, N> Queue<T, N, usize, MultiCore>
where
N: ArrayLength<T>,
{
- const_fn! {
- /// Alias for [`spsc::Queue::usize`](struct.Queue.html#method.usize)
- pub const fn new() -> Self {
- Queue::usize()
- }
+ /// Alias for [`spsc::Queue::usize`](struct.Queue.html#method.usize)
+ pub fn new() -> Self {
+ Queue(crate::i::Queue::new())
+ }
+}
+
+impl<A> crate::i::Queue<A, usize, SingleCore> {
+ /// `spsc::Queue` `const` constructor; wrap the returned value in
+ /// [`spsc::Queue`](struct.Queue.html)
+ pub const unsafe fn new_sc() -> Self {
+ crate::i::Queue::usize_sc()
}
}
@@ -432,11 +445,9 @@ impl Queue
where
N: ArrayLength<T>,
{
- const_fn! {
- /// Alias for [`spsc::Queue::usize_sc`](struct.Queue.html#method.usize_sc)
- pub const unsafe fn new_sc() -> Self {
- Queue::usize_sc()
- }
+ /// Alias for [`spsc::Queue::usize_sc`](struct.Queue.html#method.usize_sc)
+ pub unsafe fn new_sc() -> Self {
+ Queue(crate::i::Queue::new_sc())
}
}
@@ -520,10 +531,10 @@ macro_rules! iterator {
fn next(&mut self) -> Option<$elem> {
if self.index < self.len {
- let head = self.rb.head.load_relaxed().into();
+ let head = self.rb.0.head.load_relaxed().into();
let cap = self.rb.capacity().into();
- let ptr = self.rb.buffer.$asptr() as $ptr;
+ let ptr = self.rb.0.buffer.$asptr() as $ptr;
let i = (head + self.index) % cap;
self.index += 1;
Some(unsafe { $mkref!(*ptr.offset(i as isize)) })
@@ -556,10 +567,9 @@ mod tests {
use crate::{consts::*, spsc::Queue};
- #[cfg(feature = "const-fn")]
#[test]
fn static_new() {
- static mut _Q: Queue<i32, U4> = Queue::new();
+ static mut _Q: Queue<i32, U4> = Queue(crate::i::Queue::new());
}
#[test]
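
Note: with the `const` constructor moved into `i::Queue`, the SPSC ring buffer can be a plain `static` that is split once into its two endpoints, as the updated module docs show. A condensed sketch of that pattern (the element type `u8` and capacity `U8` are assumptions for illustration):

    use heapless::{consts::*, spsc::Queue};

    static mut Q: Queue<u8, U8> = Queue(heapless::i::Queue::new());

    fn main() {
        // NOTE(unsafe) each endpoint must end up in exactly one execution context
        let (mut producer, mut consumer) = unsafe { Q.split() };

        producer.enqueue(42).ok();
        assert_eq!(consumer.dequeue(), Some(42));
    }
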
diff --git a/src/spsc/split.rs b/src/spsc/split.rs
index c10b56fe..3bf7df91 100644
--- a/src/spsc/split.rs
+++ b/src/spsc/split.rs
@@ -79,15 +79,15 @@ macro_rules! impl_ {
/// Returns if there are any items to dequeue. When this returns true, at least the
/// first subsequent dequeue will succeed.
pub fn ready(&self) -> bool {
- let head = unsafe { self.rb.as_ref().head.load_relaxed() };
- let tail = unsafe { self.rb.as_ref().tail.load_acquire() }; // ▼
+ let head = unsafe { self.rb.as_ref().0.head.load_relaxed() };
+ let tail = unsafe { self.rb.as_ref().0.tail.load_acquire() }; // ▼
return head != tail;
}
/// Returns the item in the front of the queue, or `None` if the queue is empty
pub fn dequeue(&mut self) -> Option {
- let head = unsafe { self.rb.as_ref().head.load_relaxed() };
- let tail = unsafe { self.rb.as_ref().tail.load_acquire() }; // ▼
+ let head = unsafe { self.rb.as_ref().0.head.load_relaxed() };
+ let tail = unsafe { self.rb.as_ref().0.tail.load_acquire() }; // ▼
if head != tail {
Some(unsafe { self._dequeue(head) }) // ▲
@@ -102,8 +102,8 @@ macro_rules! impl_ {
///
/// If the queue is empty this is equivalent to calling `mem::uninitialized`
pub unsafe fn dequeue_unchecked(&mut self) -> T {
- let head = self.rb.as_ref().head.load_relaxed();
- debug_assert_ne!(head, self.rb.as_ref().tail.load_acquire());
+ let head = self.rb.as_ref().0.head.load_relaxed();
+ debug_assert_ne!(head, self.rb.as_ref().0.tail.load_acquire());
self._dequeue(head) // ▲
}
@@ -112,10 +112,10 @@ macro_rules! impl_ {
let cap = rb.capacity();
- let item = (rb.buffer.as_ptr() as *const T)
+ let item = (rb.0.buffer.as_ptr() as *const T)
.add(usize::from(head % cap))
.read();
- rb.head.store_release(head.wrapping_add(1)); // ▲
+ rb.0.head.store_release(head.wrapping_add(1)); // ▲
item
}
}
@@ -130,13 +130,13 @@ macro_rules! impl_ {
pub fn ready(&self) -> bool {
let cap = unsafe { self.rb.as_ref().capacity() };
- let tail = unsafe { self.rb.as_ref().tail.load_relaxed() };
+ let tail = unsafe { self.rb.as_ref().0.tail.load_relaxed() };
// NOTE we could replace this `load_acquire` with a `load_relaxed` and this method
// would be sound on most architectures but that change would result in UB according
// to the C++ memory model, which is what Rust currently uses, so we err on the side
// of caution and stick to `load_acquire`. Check issue google#sanitizers#882 for
// more details.
- let head = unsafe { self.rb.as_ref().head.load_acquire() };
+ let head = unsafe { self.rb.as_ref().0.head.load_acquire() };
return head.wrapping_add(cap) != tail;
}
@@ -145,13 +145,13 @@ macro_rules! impl_ {
/// Returns back the `item` if the queue is full
pub fn enqueue(&mut self, item: T) -> Result<(), T> {
let cap = unsafe { self.rb.as_ref().capacity() };
- let tail = unsafe { self.rb.as_ref().tail.load_relaxed() };
+ let tail = unsafe { self.rb.as_ref().0.tail.load_relaxed() };
// NOTE we could replace this `load_acquire` with a `load_relaxed` and this method
// would be sound on most architectures but that change would result in UB according
// to the C++ memory model, which is what Rust currently uses, so we err on the side
// of caution and stick to `load_acquire`. Check issue google#sanitizers#882 for
// more details.
- let head = unsafe { self.rb.as_ref().head.load_acquire() }; // ▼
+ let head = unsafe { self.rb.as_ref().0.head.load_acquire() }; // ▼
if tail.wrapping_sub(head) > cap - 1 {
Err(item)
@@ -170,8 +170,8 @@ macro_rules! impl_ {
/// to create a copy of `item`, which could result in `T`'s destructor running on `item`
/// twice.
pub unsafe fn enqueue_unchecked(&mut self, item: T) {
- let tail = self.rb.as_ref().tail.load_relaxed();
- debug_assert_ne!(tail.wrapping_add(1), self.rb.as_ref().head.load_acquire());
+ let tail = self.rb.as_ref().0.tail.load_relaxed();
+ debug_assert_ne!(tail.wrapping_add(1), self.rb.as_ref().0.head.load_acquire());
self._enqueue(tail, item); // ▲
}
@@ -183,10 +183,10 @@ macro_rules! impl_ {
// NOTE(ptr::write) the memory slot that we are about to write to is
// uninitialized. We use `ptr::write` to avoid running `T`'s destructor on the
// uninitialized memory
- (rb.buffer.as_mut_ptr() as *mut T)
+ (rb.0.buffer.as_mut_ptr() as *mut T)
.add(usize::from(tail % cap))
.write(item);
- rb.tail.store_release(tail.wrapping_add(1)); // ▲
+ rb.0.tail.store_release(tail.wrapping_add(1)); // ▲
}
}
};
diff --git a/src/string.rs b/src/string.rs
index 83a3d857..716bace4 100644
--- a/src/string.rs
+++ b/src/string.rs
@@ -1,42 +1,50 @@
-use core::{fmt, fmt::Write, hash, ops, str, str::Utf8Error};
+use core::{fmt, fmt::Write, hash, mem, ops, str, str::Utf8Error};
use generic_array::{
typenum::{consts::*, IsGreaterOrEqual},
- ArrayLength,
+ ArrayLength, GenericArray,
};
use hash32;
use crate::Vec;
/// A fixed capacity [`String`](https://doc.rust-lang.org/std/string/struct.String.html)
-pub struct String<N>
+pub struct String<N>(#[doc(hidden)] pub crate::i::String<GenericArray<u8, N>>)
where
- N: ArrayLength<u8>,
-{
- vec: Vec<u8, N>,
+ N: ArrayLength<u8>;
+
+impl<A> crate::i::String<A> {
+ /// `String` `const` constructor; wrap the returned value in [`String`](../struct.String.html)
+ pub const fn new() -> Self {
+ Self {
+ vec: crate::i::Vec::new(),
+ }
+ }
}
impl<N> String<N>
where
N: ArrayLength<u8>,
{
+ /// Constructs a new, empty `String` with a fixed capacity of `N`
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use heapless::String;
+ /// use heapless::consts::*;
+ ///
+ /// // allocate the string on the stack
+ /// let mut s: String<U4> = String::new();
+ ///
+ /// // allocate the string in a static variable
+ /// static mut S: String<U4> = String(heapless::i::String::new());
+ /// ```
#[inline]
- const_fn! {
- /// Constructs a new, empty `String` with a fixed capacity of `N`
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use heapless::String;
- /// use heapless::consts::*;
- ///
- /// let mut s: String<U4> = String::new();
- /// ```
- pub const fn new() -> Self {
- String { vec: Vec::new() }
- }
+ pub fn new() -> Self {
+ String(crate::i::String::new())
}
/// Converts a vector of bytes into a `String`.
@@ -85,7 +93,7 @@ where
// validate input
str::from_utf8(&*vec)?;
- Ok(String { vec: vec })
+ Ok(unsafe { String::from_utf8_unchecked(vec) })
}
/// Converts a vector of bytes to a `String` without checking that the
@@ -93,8 +101,11 @@ where
///
/// See the safe version, `from_utf8`, for more details.
#[inline]
- pub unsafe fn from_utf8_unchecked(vec: Vec<u8, N>) -> String<N> {
- String { vec: vec }
+ pub unsafe fn from_utf8_unchecked(mut vec: Vec<u8, N>) -> String<N> {
+ // FIXME this may result in a memcpy at runtime
+ let vec_ = mem::replace(&mut vec.0, mem::uninitialized());
+ mem::forget(vec);
+ String(crate::i::String { vec: vec_ })
}
/// Converts a `String` into a byte vector.
@@ -117,7 +128,7 @@ where
/// ```
#[inline]
pub fn into_bytes(self) -> Vec<u8, N> {
- self.vec
+ Vec(self.0.vec)
}
/// Extracts a string slice containing the entire string.
@@ -138,7 +149,7 @@ where
/// ```
#[inline]
pub fn as_str(&self) -> &str {
- unsafe { str::from_utf8_unchecked(&*self.vec) }
+ unsafe { str::from_utf8_unchecked(self.0.vec.as_slice()) }
}
/// Converts a `String` into a mutable string slice.
@@ -157,7 +168,7 @@ where
/// ```
#[inline]
pub fn as_mut_str(&mut self) -> &mut str {
- unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
+ unsafe { str::from_utf8_unchecked_mut(self.0.vec.as_mut_slice()) }
}
/// Appends a given string slice onto the end of this `String`.
@@ -180,7 +191,7 @@ where
/// ```
#[inline]
pub fn push_str(&mut self, string: &str) -> Result<(), ()> {
- self.vec.extend_from_slice(string.as_bytes())
+ self.0.vec.extend_from_slice(string.as_bytes())
}
/// Returns the maximum number of elements the String can hold
@@ -198,7 +209,7 @@ where
/// ```
#[inline]
pub fn capacity(&self) -> usize {
- self.vec.capacity()
+ self.0.vec.capacity()
}
/// Appends the given [`char`] to the end of this `String`.
@@ -226,35 +237,14 @@ where
#[inline]
pub fn push(&mut self, c: char) -> Result<(), ()> {
match c.len_utf8() {
- 1 => self.vec.push(c as u8).map_err(|_| {}),
+ 1 => self.0.vec.push(c as u8).map_err(|_| {}),
_ => self
+ .0
.vec
.extend_from_slice(c.encode_utf8(&mut [0; 4]).as_bytes()),
}
}
- /// Returns a byte slice of this `String`'s contents.
- ///
- /// The inverse of this method is [`from_utf8`].
- ///
- /// [`from_utf8`]: #method.from_utf8
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use heapless::String;
- /// use heapless::consts::*;
- ///
- /// let s: String<U8> = String::from("hello");
- ///
- /// assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes());
- #[inline]
- pub fn as_bytes(&self) -> &[u8] {
- &self.vec
- }
-
/// Shortens this `String` to the specified length.
///
/// If `new_len` is greater than the string's current length, this has no
@@ -287,7 +277,7 @@ where
pub fn truncate(&mut self, new_len: usize) {
if new_len <= self.len() {
assert!(self.is_char_boundary(new_len));
- self.vec.truncate(new_len)
+ self.0.vec.truncate(new_len)
}
}
@@ -318,36 +308,14 @@ where
// pop bytes that correspond to `ch`
for _ in 0..ch.len_utf8() {
- self.vec.pop();
+ unsafe {
+ self.0.vec.pop_unchecked();
+ }
}
Some(ch)
}
- ///
- /// Returns `true` if this `String` has a length of zero.
- ///
- /// Returns `false` otherwise.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use heapless::String;
- /// use heapless::consts::*;
- ///
- /// let mut v: String<U8> = String::new();
- /// assert!(v.is_empty());
- ///
- /// v.push('a');
- /// assert!(!v.is_empty());
- /// ```
- #[inline]
- pub fn is_empty(&self) -> bool {
- self.len() == 0
- }
-
/// Truncates this `String`, removing all contents.
///
/// While this means the `String` will have a length of zero, it does not
@@ -371,25 +339,7 @@ where
/// ```
#[inline]
pub fn clear(&mut self) {
- self.vec.clear()
- }
-
- /// Returns the length of this `String`, in bytes.
- ///
- /// # Examples
- ///
- /// Basic usage:
- ///
- /// ```
- /// use heapless::String;
- /// use heapless::consts::*;
- ///
- /// let a: String<U8> = String::from("foo");
- ///
- /// assert_eq!(a.len(), 3);
- /// ```
- pub fn len(&self) -> usize {
- self.vec.len()
+ self.0.vec.clear()
}
}
@@ -431,9 +381,9 @@ where
N: ArrayLength<u8>,
{
fn clone(&self) -> Self {
- Self {
- vec: self.vec.clone(),
- }
+ Self(crate::i::String {
+ vec: self.0.vec.clone(),
+ })
}
}
@@ -442,8 +392,7 @@ where
N: ArrayLength<u8>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let slice: &str = &**self;
- fmt::Debug::fmt(slice, f)
+ <str as fmt::Debug>::fmt(self, f)
}
}
@@ -452,8 +401,7 @@ where
N: ArrayLength<u8>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let slice: &str = &**self;
- fmt::Display::fmt(slice, f)
+ <str as fmt::Display>::fmt(self, f)
}
}
@@ -463,7 +411,7 @@ where
{
#[inline]
fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
- (**self).hash(hasher)
+ <str as hash::Hash>::hash(self, hasher)
}
}
@@ -473,7 +421,7 @@ where
{
#[inline]
fn hash<H: hash32::Hasher>(&self, hasher: &mut H) {
- (**self).hash(hasher)
+ <str as hash32::Hash>::hash(self, hasher)
}
}
@@ -536,11 +484,11 @@ where
N2: ArrayLength<u8>,
{
fn eq(&self, rhs: &String<N2>) -> bool {
- PartialEq::eq(&**self, &**rhs)
+ str::eq(&**self, &**rhs)
}
fn ne(&self, rhs: &String<N2>) -> bool {
- PartialEq::ne(&**self, &**rhs)
+ str::ne(&**self, &**rhs)
}
}
@@ -552,11 +500,11 @@ macro_rules! impl_eq {
{
#[inline]
fn eq(&self, other: &$rhs) -> bool {
- PartialEq::eq(&self[..], &other[..])
+ str::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &$rhs) -> bool {
- PartialEq::ne(&self[..], &other[..])
+ str::ne(&self[..], &other[..])
}
}
@@ -566,11 +514,11 @@ macro_rules! impl_eq {
{
#[inline]
fn eq(&self, other: &$lhs) -> bool {
- PartialEq::eq(&self[..], &other[..])
+ str::eq(&self[..], &other[..])
}
#[inline]
fn ne(&self, other: &$lhs) -> bool {
- PartialEq::ne(&self[..], &other[..])
+ str::ne(&self[..], &other[..])
}
}
};
@@ -610,10 +558,9 @@ impl_from_num!(u64, U20);
mod tests {
use crate::{consts::*, String, Vec};
- #[cfg(feature = "const-fn")]
#[test]
fn static_new() {
- static mut _S: String<U8> = String::new();
+ static mut _S: String<U8> = String(crate::i::String::new());
}
#[test]
diff --git a/src/vec.rs b/src/vec.rs
index 6855982b..b3dbee7c 100644
--- a/src/vec.rs
+++ b/src/vec.rs
@@ -3,6 +3,134 @@ use core::{fmt, hash, iter::FromIterator, mem::MaybeUninit, ops, ptr, slice};
use generic_array::{ArrayLength, GenericArray};
use hash32;
+impl<A> crate::i::Vec<A> {
+ /// `Vec` `const` constructor; wrap the returned value in [`Vec`](../struct.Vec.html)
+ pub const fn new() -> Self {
+ Self {
+ buffer: MaybeUninit::uninit(),
+ len: 0,
+ }
+ }
+}
+
+impl<T, N> crate::i::Vec<GenericArray<T, N>>
+where
+ N: ArrayLength<T>,
+{
+ pub(crate) fn as_slice(&self) -> &[T] {
+ // NOTE(unsafe) avoid bound checks in the slicing operation
+ // &buffer[..self.len]
+ unsafe { slice::from_raw_parts(self.buffer.as_ptr() as *const T, self.len) }
+ }
+
+ pub(crate) fn as_mut_slice(&mut self) -> &mut [T] {
+ // NOTE(unsafe) avoid bound checks in the slicing operation
+ // &mut buffer[..len]
+ unsafe { slice::from_raw_parts_mut(self.buffer.as_mut_ptr() as *mut T, self.len) }
+ }
+
+ pub(crate) fn capacity(&self) -> usize {
+ N::to_usize()
+ }
+
+ pub(crate) fn clear(&mut self) {
+ self.truncate(0);
+ }
+
+ pub(crate) fn clone(&self) -> Self
+ where
+ T: Clone,
+ {
+ let mut new = Self::new();
+ new.extend_from_slice(self.as_slice()).unwrap();
+ new
+ }
+
+ pub(crate) fn extend<I>(&mut self, iter: I)
+ where
+ I: IntoIterator<Item = T>,
+ {
+ for elem in iter {
+ self.push(elem).ok().unwrap()
+ }
+ }
+
+ pub(crate) fn extend_from_slice(&mut self, other: &[T]) -> Result<(), ()>
+ where
+ T: Clone,
+ {
+ if self.len + other.len() > self.capacity() {
+ // won't fit in the `Vec`; don't modify anything and return an error
+ Err(())
+ } else {
+ for elem in other {
+ unsafe {
+ self.push_unchecked(elem.clone());
+ }
+ }
+ Ok(())
+ }
+ }
+
+ pub(crate) fn is_full(&self) -> bool {
+ self.len == self.capacity()
+ }
+
+ pub(crate) unsafe fn pop_unchecked(&mut self) -> T {
+ debug_assert!(!self.as_slice().is_empty());
+
+ self.len -= 1;
+ (self.buffer.as_ptr() as *const T).add(self.len).read()
+ }
+
+ pub(crate) fn push(&mut self, item: T) -> Result<(), T> {
+ if self.len < self.capacity() {
+ unsafe { self.push_unchecked(item) }
+ Ok(())
+ } else {
+ Err(item)
+ }
+ }
+
+ pub(crate) unsafe fn push_unchecked(&mut self, item: T) {
+ // NOTE(ptr::write) the memory slot that we are about to write to is uninitialized. We
+ // use `ptr::write` to avoid running `T`'s destructor on the uninitialized memory
+ (self.buffer.as_mut_ptr() as *mut T)
+ .add(self.len)
+ .write(item);
+
+ self.len += 1;
+ }
+
+ unsafe fn swap_remove_unchecked(&mut self, index: usize) -> T {
+ let length = self.len;
+ debug_assert!(index < length);
+ ptr::swap(
+ self.as_mut_slice().get_unchecked_mut(index),
+ self.as_mut_slice().get_unchecked_mut(length - 1),
+ );
+ self.pop_unchecked()
+ }
+
+ pub(crate) fn swap_remove(&mut self, index: usize) -> T {
+ assert!(index < self.len);
+ unsafe { self.swap_remove_unchecked(index) }
+ }
+
+ pub(crate) fn truncate(&mut self, len: usize) {
+ unsafe {
+ // drop any extra elements
+ while len < self.len {
+ // decrement len before the drop_in_place(), so a panic on Drop
+ // doesn't re-drop the just-failed value.
+ self.len -= 1;
+ let len = self.len;
+ ptr::drop_in_place(self.as_mut_slice().get_unchecked_mut(len));
+ }
+ }
+ }
+}
+
/// A fixed capacity [`Vec`](https://doc.rust-lang.org/std/vec/struct.Vec.html)
///
/// # Examples
@@ -32,13 +160,9 @@ use hash32;
/// }
/// assert_eq!(vec, [7, 1, 2, 3]);
/// ```
-pub struct Vec<T, N>
+pub struct Vec<T, N>(#[doc(hidden)] pub crate::i::Vec<GenericArray<T, N>>)
where
- N: ArrayLength<T>,
-{
- buffer: MaybeUninit<GenericArray<T, N>>,
- len: usize,
-}
+ N: ArrayLength<T>;
impl<T, N> Clone for Vec<T, N>
where
@@ -46,9 +170,7 @@ where
T: Clone,
{
fn clone(&self) -> Self {
- let mut new = Self::new();
- new.extend_from_slice(self.as_ref()).unwrap();
- new
+ Vec(self.0.clone())
}
}
@@ -57,25 +179,33 @@ where
N: ArrayLength<T>,
{
/* Constructors */
- const_fn!(
- /// Constructs a new, empty vector with a fixed capacity of `N`
- pub const fn new() -> Self {
- Vec {
- buffer: MaybeUninit::uninit(),
- len: 0,
- }
- }
- );
+ /// Constructs a new, empty vector with a fixed capacity of `N`
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use heapless::Vec;
+ /// use heapless::consts::*;
+ ///
+ /// // allocate the vector on the stack
+ /// let mut x: Vec<u8, U16> = Vec::new();
+ ///
+ /// // allocate the vector in a static variable
+ /// static mut X: Vec<u8, U16> = Vec(heapless::i::Vec::new());
+ /// ```
+ pub fn new() -> Self {
+ Vec(crate::i::Vec::new())
+ }
/* Public API */
/// Returns the maximum number of elements the vector can hold
pub fn capacity(&self) -> usize {
- N::to_usize()
+ self.0.capacity()
}
/// Clears the vector, removing all values.
pub fn clear(&mut self) {
- self.truncate(0);
+ self.0.clear()
}
/// Clones and appends all elements in a slice to the `Vec`.
@@ -98,64 +228,38 @@ where
where
T: Clone,
{
- if self.len() + other.len() > self.capacity() {
- // won't fit in the `Vec`; don't modify anything and return an error
- Err(())
- } else {
- for elem in other {
- self.push(elem.clone()).ok();
- }
- Ok(())
- }
+ self.0.extend_from_slice(other)
}
/// Removes the last element from a vector and return it, or `None` if it's empty
pub fn pop(&mut self) -> Option<T> {
- if self.len != 0 {
- Some(unsafe { self.pop_unchecked() })
+ if self.0.len != 0 {
+ Some(unsafe { self.0.pop_unchecked() })
} else {
None
}
}
- pub(crate) unsafe fn pop_unchecked(&mut self) -> T {
- debug_assert!(!self.is_empty());
-
- self.len -= 1;
- (self.buffer.as_ptr() as *const T).add(self.len).read()
- }
-
/// Appends an `item` to the back of the collection
///
/// Returns back the `item` if the vector is full
pub fn push(&mut self, item: T) -> Result<(), T> {
- if self.len < self.capacity() {
- unsafe { self.push_unchecked(item) }
- Ok(())
- } else {
- Err(item)
- }
+ self.0.push(item)
}
pub(crate) unsafe fn push_unchecked(&mut self, item: T) {
- // NOTE(ptr::write) the memory slot that we are about to write to is uninitialized. We
- // use `ptr::write` to avoid running `T`'s destructor on the uninitialized memory
- (self.buffer.as_mut_ptr() as *mut T)
- .add(self.len)
- .write(item);
-
- self.len += 1;
+ self.0.push_unchecked(item)
}
/// Shortens the vector, keeping the first `len` elements and dropping the rest.
pub fn truncate(&mut self, len: usize) {
unsafe {
// drop any extra elements
- while len < self.len {
+ while len < self.len() {
// decrement len before the drop_in_place(), so a panic on Drop
// doesn't re-drop the just-failed value.
- self.len -= 1;
- let len = self.len;
+ self.0.len -= 1;
+ let len = self.len();
ptr::drop_in_place(self.get_unchecked_mut(len));
}
}
@@ -176,8 +280,8 @@ where
return Err(());
}
- if new_len > self.len {
- while self.len < new_len {
+ if new_len > self.len() {
+ while self.len() < new_len {
self.push(value.clone()).ok();
}
} else {
@@ -230,23 +334,15 @@ where
/// assert_eq!(&*v, ["baz", "qux"]);
/// ```
pub fn swap_remove(&mut self, index: usize) -> T {
- assert!(index < self.len());
- unsafe { self.swap_remove_unchecked(index) }
+ self.0.swap_remove(index)
}
pub(crate) unsafe fn swap_remove_unchecked(&mut self, index: usize) -> T {
- let length = self.len();
- debug_assert!(index < length);
- ptr::swap(
- self.get_unchecked_mut(index),
- self.get_unchecked_mut(length - 1),
- );
- self.pop_unchecked()
+ self.0.swap_remove_unchecked(index)
}
- /* Private API */
pub(crate) fn is_full(&self) -> bool {
- self.capacity() == self.len()
+ self.0.is_full()
}
}
@@ -265,8 +361,7 @@ where
N: ArrayLength<T>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let slice: &[T] = &**self;
- slice.fmt(f)
+ <[T] as fmt::Debug>::fmt(self, f)
}
}
@@ -287,9 +382,7 @@ where
where
I: IntoIterator<Item = T>,
{
- for elem in iter {
- self.push(elem).ok().unwrap()
- }
+ self.0.extend(iter)
}
}
@@ -312,7 +405,7 @@ where
N: ArrayLength<T>,
{
fn hash<H: hash::Hasher>(&self, state: &mut H) {
- hash::Hash::hash(&**self, state)
+ <[T] as hash::Hash>::hash(self, state)
}
}
@@ -322,7 +415,7 @@ where
N: ArrayLength<T>,
{
fn hash<H: hash32::Hasher>(&self, state: &mut H) {
- hash32::Hash::hash(&**self, state)
+ <[T] as hash32::Hash>::hash(self, state)
}
}
@@ -387,7 +480,11 @@ where
type Item = T;
fn next(&mut self) -> Option<T> {
if self.next < self.vec.len() {
- let item = unsafe { (self.vec.buffer.as_ptr() as *const T).add(self.next).read() };
+ let item = unsafe {
+ (self.vec.0.buffer.as_ptr() as *const T)
+ .add(self.next)
+ .read()
+ };
self.next += 1;
Some(item)
} else {
@@ -418,7 +515,7 @@ where
// Drop all the elements that have not been moved out of vec
ptr::drop_in_place(&mut self.vec[self.next..]);
// Prevent dropping of other elements
- self.vec.len = 0;
+ self.vec.0.len = 0;
}
}
}
@@ -442,7 +539,7 @@ where
A: PartialEq<B>,
{
fn eq(&self, other: &Vec<B, N2>) -> bool {
- self[..] == other[..]
+ <[A]>::eq(self, &**other)
}
}
@@ -454,7 +551,7 @@ macro_rules! eq {
N: ArrayLength<A>,
{
fn eq(&self, other: &$Rhs) -> bool {
- self[..] == other[..]
+ <[A]>::eq(self, &other[..])
}
}
};
@@ -492,9 +589,7 @@ where
type Target = [T];
fn deref(&self) -> &[T] {
- // NOTE(unsafe) avoid bound checks in the slicing operation
- // &buffer[..self.len]
- unsafe { slice::from_raw_parts(self.buffer.as_ptr() as *const T, self.len) }
+ self.0.as_slice()
}
}
@@ -503,11 +598,7 @@ where
N: ArrayLength<T>,
{
fn deref_mut(&mut self) -> &mut [T] {
- let len = self.len();
-
- // NOTE(unsafe) avoid bound checks in the slicing operation
- // &mut buffer[..len]
- unsafe { slice::from_raw_parts_mut(self.buffer.as_mut_ptr() as *mut T, len) }
+ self.0.as_mut_slice()
}
}
@@ -555,10 +646,9 @@ where
mod tests {
use crate::{consts::*, Vec};
- #[cfg(feature = "const-fn")]
#[test]
fn static_new() {
- static mut _V: Vec<i32, U4> = Vec::new();
+ static mut _V: Vec<i32, U4> = Vec(crate::i::Vec::new());
}
macro_rules! droppable {
diff --git a/tests/tsan.rs b/tests/tsan.rs
index f0ac5719..a44e40e8 100644
--- a/tests/tsan.rs
+++ b/tests/tsan.rs
@@ -1,22 +1,18 @@
+#![deny(rust_2018_compatibility)]
+#![deny(rust_2018_idioms)]
#![deny(warnings)]
-extern crate generic_array;
-extern crate heapless;
-extern crate scoped_threadpool;
-
use std::thread;
use generic_array::typenum::Unsigned;
-use heapless::consts::*;
-use heapless::spsc;
+use heapless::{consts::*, spsc};
use scoped_threadpool::Pool;
#[test]
fn once() {
- static mut RB: Option<spsc::Queue<i32, U4>> = None;
- unsafe { RB = Some(spsc::Queue::new()) };
+ static mut RB: spsc::Queue<i32, U4> = spsc::Queue(heapless::i::Queue::new());
- let rb = unsafe { RB.as_mut().unwrap() };
+ let rb = unsafe { &mut RB };
rb.enqueue(0).unwrap();
@@ -35,10 +31,9 @@ fn once() {
#[test]
fn twice() {
- static mut RB: Option<spsc::Queue<i32, U4>> = None;
- unsafe { RB = Some(spsc::Queue::new()) };
+ static mut RB: spsc::Queue<i32, U4> = spsc::Queue(heapless::i::Queue::new());
- let rb = unsafe { RB.as_mut().unwrap() };
+ let rb = unsafe { &mut RB };
rb.enqueue(0).unwrap();
rb.enqueue(1).unwrap();