Mirror of https://github.com/tokio-rs/tokio.git (synced 2025-09-28 12:10:37 +00:00)
rt: implement task::Id using StaticAtomicU64 (#5282)
This patch simplifies the implementation of `task::Id` by moving the conditional compilation into the `AtomicU64` definition. To handle platforms that lack `const fn Mutex::new()`, `StaticAtomicU64` is defined so that it always provides a `const fn new()`; it is implemented with `OnceCell` when needed.
This commit is contained in:
parent ae69d11d1f
commit 39766220f4
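For orientation, the pattern the patch converges on is a process-wide counter held in a `static` and bumped with `fetch_add`. The sketch below shows that pattern using `std::sync::atomic::AtomicU64` directly; in tokio, `StaticAtomicU64` plays the same role and falls back to a mutex/`OnceCell`-based implementation on targets without native 64-bit atomics. The `Id` wrapper here is a simplified stand-in, not tokio's actual type.

use std::sync::atomic::{AtomicU64, Ordering::Relaxed};

/// Simplified stand-in for `task::Id`.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
struct Id(u64);

impl Id {
    fn next() -> Self {
        // A `const fn new()` is what allows the counter to live in a `static`
        // initializer; this is the property `StaticAtomicU64` guarantees.
        static NEXT_ID: AtomicU64 = AtomicU64::new(1);
        Id(NEXT_ID.fetch_add(1, Relaxed))
    }
}

fn main() {
    // IDs start at 1 and increase monotonically.
    assert_eq!(Id::next(), Id(1));
    assert_eq!(Id::next(), Id(2));
}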
@@ -25,6 +25,13 @@ pub(crate) mod sync {
         }
     }
 
     pub(crate) use loom::sync::*;
+
+    pub(crate) mod atomic {
+        pub(crate) use loom::sync::atomic::*;
+
+        // TODO: implement a loom version
+        pub(crate) type StaticAtomicU64 = std::sync::atomic::AtomicU64;
+    }
 }
 
 pub(crate) mod rand {
@@ -7,12 +7,13 @@
 // `#[cfg(target_has_atomic = "64")]`.
 // Refs: https://github.com/rust-lang/rust/tree/master/src/librustc_target
 
 cfg_has_atomic_u64! {
-    pub(crate) use std::sync::atomic::AtomicU64;
+    #[path = "atomic_u64_native.rs"]
+    mod imp;
 }
 
 cfg_not_has_atomic_u64! {
-    mod atomic_u64_as_mutex;
-
-    pub(crate) use atomic_u64_as_mutex::AtomicU64;
+    #[path = "atomic_u64_as_mutex.rs"]
+    mod imp;
 }
+
+pub(crate) use imp::{AtomicU64, StaticAtomicU64};
@@ -1,18 +1,24 @@
 use crate::loom::sync::Mutex;
 use std::sync::atomic::Ordering;
 
+cfg_has_const_mutex_new! {
+    #[path = "atomic_u64_static_const_new.rs"]
+    mod static_macro;
+}
+
+cfg_not_has_const_mutex_new! {
+    #[path = "atomic_u64_static_once_cell.rs"]
+    mod static_macro;
+}
+
+pub(crate) use static_macro::StaticAtomicU64;
+
 #[derive(Debug)]
 pub(crate) struct AtomicU64 {
     inner: Mutex<u64>,
 }
 
 impl AtomicU64 {
-    pub(crate) fn new(val: u64) -> Self {
-        Self {
-            inner: Mutex::new(val),
-        }
-    }
-
     pub(crate) fn load(&self, _: Ordering) -> u64 {
         *self.inner.lock()
     }
tokio/src/loom/std/atomic_u64_native.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
+pub(crate) use std::sync::atomic::{AtomicU64, Ordering};
+
+/// Alias `AtomicU64` to `StaticAtomicU64`
+pub(crate) type StaticAtomicU64 = AtomicU64;
tokio/src/loom/std/atomic_u64_static_const_new.rs (new file, 12 lines)
@@ -0,0 +1,12 @@
+use super::AtomicU64;
+use crate::loom::sync::Mutex;
+
+pub(crate) type StaticAtomicU64 = AtomicU64;
+
+impl AtomicU64 {
+    pub(crate) const fn new(val: u64) -> Self {
+        Self {
+            inner: Mutex::const_new(val),
+        }
+    }
+}
tokio/src/loom/std/atomic_u64_static_once_cell.rs (new file, 36 lines)
@@ -0,0 +1,36 @@
+use super::AtomicU64;
+use crate::loom::sync::{atomic::Ordering, Mutex};
+use crate::util::once_cell::OnceCell;
+
+pub(crate) struct StaticAtomicU64 {
+    init: u64,
+    cell: OnceCell<Mutex<u64>>,
+}
+
+impl AtomicU64 {
+    pub(crate) fn new(val: u64) -> Self {
+        Self {
+            inner: Mutex::new(val),
+        }
+    }
+}
+
+impl StaticAtomicU64 {
+    pub(crate) const fn new(val: u64) -> StaticAtomicU64 {
+        StaticAtomicU64 {
+            init: val,
+            cell: OnceCell::new(),
+        }
+    }
+
+    pub(crate) fn fetch_add(&self, val: u64, order: Ordering) -> u64 {
+        let mut lock = self.inner().lock();
+        let prev = *lock;
+        *lock = prev + val;
+        prev
+    }
+
+    fn inner(&self) -> &Mutex<u64> {
+        self.cell.get(|| Mutex::new(self.init))
+    }
+}
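The once-cell variant exists because, on targets where `Mutex::new` is not `const`, the mutex cannot be built inside a `static` initializer; `StaticAtomicU64::new` therefore only records the initial value, and the mutex is created lazily on first use. Below is a minimal standalone sketch of the same lazy-initialization pattern, written against std's `OnceLock` and `Mutex` rather than tokio's internal loom wrappers; the names (`LazyCounter`, `COUNTER`) are illustrative only.

use std::sync::atomic::Ordering;
use std::sync::{Mutex, OnceLock};

/// Illustrative counter whose mutex is created lazily, so `new` can be `const`.
struct LazyCounter {
    init: u64,
    cell: OnceLock<Mutex<u64>>,
}

impl LazyCounter {
    const fn new(init: u64) -> Self {
        // No mutex is constructed here, so this works in a `static` initializer
        // even when `Mutex::new` is not `const` (the case handled above).
        LazyCounter {
            init,
            cell: OnceLock::new(),
        }
    }

    fn fetch_add(&self, val: u64, _order: Ordering) -> u64 {
        // The first caller creates the mutex from the recorded initial value.
        let mut lock = self
            .cell
            .get_or_init(|| Mutex::new(self.init))
            .lock()
            .unwrap();
        let prev = *lock;
        *lock = prev + val;
        prev
    }
}

static COUNTER: LazyCounter = LazyCounter::new(1);

fn main() {
    assert_eq!(COUNTER.fetch_add(1, Ordering::Relaxed), 1);
    assert_eq!(COUNTER.fetch_add(1, Ordering::Relaxed), 2);
}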
@@ -71,7 +71,7 @@ pub(crate) mod sync {
     pub(crate) mod atomic {
         pub(crate) use crate::loom::std::atomic_u16::AtomicU16;
         pub(crate) use crate::loom::std::atomic_u32::AtomicU32;
-        pub(crate) use crate::loom::std::atomic_u64::AtomicU64;
+        pub(crate) use crate::loom::std::atomic_u64::{AtomicU64, StaticAtomicU64};
         pub(crate) use crate::loom::std::atomic_usize::AtomicUsize;
 
         pub(crate) use std::sync::atomic::{fence, AtomicBool, AtomicPtr, AtomicU8, Ordering};
@@ -562,55 +562,12 @@ impl fmt::Display for Id {
 }
 
 impl Id {
-    // When 64-bit atomics are available, use a static `AtomicU64` counter to
-    // generate task IDs.
-    //
-    // Note(eliza): we _could_ just use `crate::loom::AtomicU64`, which switches
-    // between an atomic and mutex-based implementation here, rather than having
-    // two separate functions for targets with and without 64-bit atomics.
-    // However, because we can't use the mutex-based implementation in a static
-    // initializer directly, the 32-bit impl also has to use a `OnceCell`, and I
-    // thought it was nicer to avoid the `OnceCell` overhead on 64-bit
-    // platforms...
-    cfg_has_atomic_u64! {
-        pub(crate) fn next() -> Self {
-            use std::sync::atomic::{AtomicU64, Ordering::Relaxed};
-            static NEXT_ID: AtomicU64 = AtomicU64::new(1);
-            Self(NEXT_ID.fetch_add(1, Relaxed))
-        }
-    }
+    pub(crate) fn next() -> Self {
+        use crate::loom::sync::atomic::{Ordering::Relaxed, StaticAtomicU64};
 
-    cfg_not_has_atomic_u64! {
-        cfg_has_const_mutex_new! {
-            pub(crate) fn next() -> Self {
-                use crate::loom::sync::Mutex;
-                static NEXT_ID: Mutex<u64> = Mutex::const_new(1);
+        static NEXT_ID: StaticAtomicU64 = StaticAtomicU64::new(1);
 
-                let mut lock = NEXT_ID.lock();
-                let id = *lock;
-                *lock += 1;
-                Self(id)
-            }
-        }
-
-        cfg_not_has_const_mutex_new! {
-            pub(crate) fn next() -> Self {
-                use crate::util::once_cell::OnceCell;
-                use crate::loom::sync::Mutex;
-
-                fn init_next_id() -> Mutex<u64> {
-                    Mutex::new(1)
-                }
-
-                static NEXT_ID: OnceCell<Mutex<u64>> = OnceCell::new();
-
-                let next_id = NEXT_ID.get(init_next_id);
-                let mut lock = next_id.lock();
-                let id = *lock;
-                *lock += 1;
-                Self(id)
-            }
-        }
+        Self(NEXT_ID.fetch_add(1, Relaxed))
     }
 
     pub(crate) fn as_u64(&self) -> u64 {
@@ -25,7 +25,7 @@ impl<T> OnceCell<T> {
     /// If the `init` closure panics, then the `OnceCell` is poisoned and all
     /// future calls to `get` will panic.
     #[inline]
-    pub(crate) fn get(&self, init: fn() -> T) -> &T {
+    pub(crate) fn get(&self, init: impl FnOnce() -> T) -> &T {
         if !self.once.is_completed() {
             self.do_init(init);
         }
@@ -41,7 +41,7 @@ impl<T> OnceCell<T> {
     }
 
     #[cold]
-    fn do_init(&self, init: fn() -> T) {
+    fn do_init(&self, init: impl FnOnce() -> T) {
         let value_ptr = self.value.get() as *mut T;
 
         self.once.call_once(|| {
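The change from `fn() -> T` to `impl FnOnce() -> T` matters because the once-cell-backed `StaticAtomicU64` needs to initialize its mutex from `self.init`, and a closure that captures state cannot be coerced to a plain function pointer. A minimal standalone illustration of why the generic bound is required (function names here are made up for the example):

// Accepts any one-shot initializer, including capturing closures.
fn get_generic<T>(init: impl FnOnce() -> T) -> T {
    init()
}

// Accepts only non-capturing functions or closures.
fn get_fn_ptr<T>(init: fn() -> T) -> T {
    init()
}

fn main() {
    let initial = 42u64;

    // Both work here: the closure captures nothing.
    assert_eq!(get_generic(|| 1u64), 1);
    assert_eq!(get_fn_ptr(|| 1u64), 1);

    // Only the generic version works here: `initial` is captured from the
    // environment, mirroring `|| Mutex::new(self.init)` in the once-cell path.
    assert_eq!(get_generic(move || initial), 42);
    // assert_eq!(get_fn_ptr(move || initial), 42); // error: expected fn pointer
}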