Skip to content

Commit

Permalink
refactor WorkerLocal
Browse files Browse the repository at this point in the history
  • Loading branch information
SparrowLii committed Mar 22, 2023
1 parent d610b0c commit 477e410
Show file tree
Hide file tree
Showing 2 changed files with 48 additions and 31 deletions.
4 changes: 2 additions & 2 deletions compiler/rustc_data_structures/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ indexmap = { version = "1.9.1" }
jobserver_crate = { version = "0.1.13", package = "jobserver" }
libc = "0.2"
measureme = "10.0.0"
rayon-core = { version = "0.4.0", package = "rustc-rayon-core", optional = true }
rayon-core = { version = "0.4.0", package = "rustc-rayon-core" }
rayon = { version = "0.4.0", package = "rustc-rayon", optional = true }
rustc_graphviz = { path = "../rustc_graphviz" }
rustc-hash = "1.1.0"
Expand Down Expand Up @@ -43,4 +43,4 @@ winapi = { version = "0.3", features = ["fileapi", "psapi", "winerror"] }
memmap2 = "0.2.1"

[features]
rustc_use_parallel_compiler = ["indexmap/rustc-rayon", "rayon", "rayon-core"]
rustc_use_parallel_compiler = ["indexmap/rustc-rayon", "rayon"]
75 changes: 46 additions & 29 deletions compiler/rustc_data_structures/src/sync.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
use crate::owning_ref::{Erased, OwningRef};
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};
use std::mem::MaybeUninit;
use std::ops::{Deref, DerefMut};
use std::panic::{catch_unwind, resume_unwind, AssertUnwindSafe};

Expand All @@ -30,6 +31,8 @@ pub use vec::AppendOnlyVec;

mod vec;

// Whether the compiler is running with more than one thread. Read by
// `WorkerLocal::new` to pick the single- vs multi-threaded representation.
// NOTE(review): presumably set once during startup, before any `WorkerLocal`
// is created — confirm at the initialization site (not visible in this hunk).
static PARALLEL: std::sync::atomic::AtomicBool = std::sync::atomic::AtomicBool::new(false);

cfg_if! {
if #[cfg(not(parallel_compiler))] {
pub auto trait Send {}
Expand Down Expand Up @@ -182,33 +185,6 @@ cfg_if! {

use std::cell::Cell;

/// Single-threaded stand-in for rayon's `WorkerLocal`: there is exactly one
/// "worker", so the value is stored directly (wrapped in `OneThread` to keep
/// the same-thread access check).
#[derive(Debug)]
pub struct WorkerLocal<T>(OneThread<T>);

impl<T> WorkerLocal<T> {
    /// Creates a new worker local where the `initial` closure computes the
    /// value this worker local should take for each thread in the thread pool.
    #[inline]
    pub fn new<F: FnMut(usize) -> T>(mut f: F) -> WorkerLocal<T> {
        // Only worker index 0 exists in the non-parallel build.
        let value = f(0);
        Self(OneThread::new(value))
    }

    /// Returns the worker-local value for each thread
    #[inline]
    pub fn into_inner(self) -> Vec<T> {
        let value = OneThread::into_inner(self.0);
        vec![value]
    }
}

impl<T> Deref for WorkerLocal<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        &self.0
    }
}

pub type MTRef<'a, T> = &'a mut T;

#[derive(Debug, Default)]
Expand Down Expand Up @@ -328,8 +304,6 @@ cfg_if! {
};
}

pub use rayon_core::WorkerLocal;

pub use rayon::iter::ParallelIterator;
use rayon::iter::IntoParallelIterator;

Expand Down Expand Up @@ -364,6 +338,49 @@ cfg_if! {
}
}

/// A value with one instance per rayon worker thread when the compiler runs
/// in parallel mode, and a single plain value otherwise. Exactly one of
/// `inner` / `mt_inner` is `Some`, selected by `single_thread`.
#[derive(Debug)]
pub struct WorkerLocal<T> {
    single_thread: bool,
    /// The sole value in single-threaded mode; `None` in parallel mode.
    inner: Option<T>,
    /// The per-worker values in parallel mode; `None` in single-threaded mode.
    mt_inner: Option<rayon_core::WorkerLocal<T>>,
}

impl<T> WorkerLocal<T> {
    /// Creates a new worker local where the `initial` closure computes the
    /// value this worker local should take for each thread in the thread pool.
    #[inline]
    pub fn new<F: FnMut(usize) -> T>(mut f: F) -> WorkerLocal<T> {
        if !PARALLEL.load(Ordering::Relaxed) {
            // Single-threaded: only worker index 0 exists.
            WorkerLocal { single_thread: true, inner: Some(f(0)), mt_inner: None }
        } else {
            // Parallel: rayon owns one value per worker thread. `inner` stays
            // `None` rather than an uninitialized `T` — the previous
            // `MaybeUninit::uninit().assume_init()` was undefined behavior
            // (an invalid `T` is materialized, and its `Drop` would run on
            // garbage when the struct is dropped).
            WorkerLocal {
                single_thread: false,
                inner: None,
                mt_inner: Some(rayon_core::WorkerLocal::new(f)),
            }
        }
    }

    /// Returns the worker-local value for each thread
    #[inline]
    pub fn into_inner(self) -> Vec<T> {
        if self.single_thread {
            // Invariant: `inner` is always `Some` when `single_thread` is set.
            vec![self.inner.unwrap()]
        } else {
            self.mt_inner.unwrap().into_inner()
        }
    }
}

impl<T> Deref for WorkerLocal<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        if self.single_thread {
            // Invariant: `inner` is always `Some` when `single_thread` is set.
            self.inner.as_ref().unwrap()
        } else {
            self.mt_inner.as_ref().unwrap().deref()
        }
    }
}

// SAFETY: in parallel mode all access is delegated to
// `rayon_core::WorkerLocal`, which provides per-thread storage; in
// single-threaded mode only one thread exists to touch `inner`.
// NOTE(review): this relies on `PARALLEL` being set before any `WorkerLocal`
// is created and on no thread being spawned in non-parallel mode — confirm
// both invariants at the call sites.
unsafe impl<T: Send> std::marker::Sync for WorkerLocal<T> {}

// Compile-time helpers: calling (or merely referencing) these with a concrete
// type forces the compiler to prove the corresponding auto-trait bound.
pub fn assert_sync<T: ?Sized + Sync>() {}
pub fn assert_send<T: ?Sized + Send>() {}
pub fn assert_send_val<T: ?Sized + Send>(_t: &T) {}
Expand Down

0 comments on commit 477e410

Please sign in to comment.