Skip to content

Commit dd8ec9c

Browse files
committed
Auto merge of rust-lang#107586 - SparrowLii:parallel-query, r=cjgillot
Introduce `DynSend` and `DynSync` auto trait for parallel compiler part of parallel-rustc rust-lang#101566 This PR introduces `DynSend / DynSync` trait and `FromDyn / IntoDyn` structure in rustc_data_structure::marker. `FromDyn` can dynamically check data structures for thread safety when switching to parallel environments (such as calling `par_for_each_in`). This happens only when `-Z threads > 1` so it doesn't affect single-threaded mode's compile efficiency. r? `@cjgillot`
2 parents ebf2b37 + d7e3e5b commit dd8ec9c

File tree

26 files changed

+554
-107
lines changed

26 files changed

+554
-107
lines changed

Cargo.lock

+1
Original file line numberDiff line numberDiff line change
@@ -3306,6 +3306,7 @@ dependencies = [
33063306
"rustc-hash",
33073307
"rustc-rayon",
33083308
"rustc-rayon-core",
3309+
"rustc_arena",
33093310
"rustc_graphviz",
33103311
"rustc_index",
33113312
"rustc_macros",

compiler/rustc_ast/src/tokenstream.rs

+7-6
Original file line numberDiff line numberDiff line change
@@ -48,14 +48,15 @@ pub enum TokenTree {
4848
Delimited(DelimSpan, Delimiter, TokenStream),
4949
}
5050

51-
// Ensure all fields of `TokenTree` is `Send` and `Sync`.
51+
// Ensure all fields of `TokenTree` are `DynSend` and `DynSync`.
5252
#[cfg(parallel_compiler)]
5353
fn _dummy()
5454
where
55-
Token: Send + Sync,
56-
DelimSpan: Send + Sync,
57-
Delimiter: Send + Sync,
58-
TokenStream: Send + Sync,
55+
Token: sync::DynSend + sync::DynSync,
56+
Spacing: sync::DynSend + sync::DynSync,
57+
DelimSpan: sync::DynSend + sync::DynSync,
58+
Delimiter: sync::DynSend + sync::DynSync,
59+
TokenStream: sync::DynSend + sync::DynSync,
5960
{
6061
}
6162

@@ -118,7 +119,7 @@ where
118119
}
119120
}
120121

121-
pub trait ToAttrTokenStream: sync::Send + sync::Sync {
122+
pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
122123
fn to_attr_token_stream(&self) -> AttrTokenStream;
123124
}
124125

compiler/rustc_codegen_ssa/src/base.rs

+6-11
Original file line numberDiff line numberDiff line change
@@ -17,10 +17,7 @@ use rustc_ast::expand::allocator::AllocatorKind;
1717
use rustc_attr as attr;
1818
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
1919
use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
20-
21-
use rustc_data_structures::sync::par_iter;
22-
#[cfg(parallel_compiler)]
23-
use rustc_data_structures::sync::ParallelIterator;
20+
use rustc_data_structures::sync::par_map;
2421
use rustc_hir as hir;
2522
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
2623
use rustc_hir::lang_items::LangItem;
@@ -689,7 +686,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
689686
// This likely is a temporary measure. Once we don't have to support the
690687
// non-parallel compiler anymore, we can compile CGUs end-to-end in
691688
// parallel and get rid of the complicated scheduling logic.
692-
let mut pre_compiled_cgus = if cfg!(parallel_compiler) {
689+
let mut pre_compiled_cgus = if tcx.sess.threads() > 1 {
693690
tcx.sess.time("compile_first_CGU_batch", || {
694691
// Try to find one CGU to compile per thread.
695692
let cgus: Vec<_> = cgu_reuse
@@ -702,12 +699,10 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
702699
// Compile the found CGUs in parallel.
703700
let start_time = Instant::now();
704701

705-
let pre_compiled_cgus = par_iter(cgus)
706-
.map(|(i, _)| {
707-
let module = backend.compile_codegen_unit(tcx, codegen_units[i].name());
708-
(i, module)
709-
})
710-
.collect();
702+
let pre_compiled_cgus = par_map(cgus, |(i, _)| {
703+
let module = backend.compile_codegen_unit(tcx, codegen_units[i].name());
704+
(i, module)
705+
});
711706

712707
total_codegen_time += start_time.elapsed();
713708

compiler/rustc_codegen_ssa/src/traits/backend.rs

+4-1
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ use rustc_target::spec::Target;
2222

2323
pub use rustc_data_structures::sync::MetadataRef;
2424

25+
use rustc_data_structures::sync::{DynSend, DynSync};
2526
use std::any::Any;
2627

2728
pub trait BackendTypes {
@@ -117,7 +118,9 @@ pub trait CodegenBackend {
117118
) -> Result<(), ErrorGuaranteed>;
118119
}
119120

120-
pub trait ExtraBackendMethods: CodegenBackend + WriteBackendMethods + Sized + Send + Sync {
121+
pub trait ExtraBackendMethods:
122+
CodegenBackend + WriteBackendMethods + Sized + Send + Sync + DynSend + DynSync
123+
{
121124
fn codegen_allocator<'tcx>(
122125
&self,
123126
tcx: TyCtxt<'tcx>,

compiler/rustc_data_structures/Cargo.toml

+1
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ libc = "0.2"
1616
measureme = "10.0.0"
1717
rustc-rayon-core = { version = "0.5.0", optional = true }
1818
rustc-rayon = { version = "0.5.0", optional = true }
19+
rustc_arena = { path = "../rustc_arena" }
1920
rustc_graphviz = { path = "../rustc_graphviz" }
2021
rustc-hash = "1.1.0"
2122
rustc_index = { path = "../rustc_index", package = "rustc_index" }

compiler/rustc_data_structures/src/lib.rs

+2
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@
2626
#![feature(test)]
2727
#![feature(thread_id_value)]
2828
#![feature(vec_into_raw_parts)]
29+
#![feature(allocator_api)]
2930
#![feature(get_mut_unchecked)]
3031
#![feature(lint_reasons)]
3132
#![feature(unwrap_infallible)]
@@ -77,6 +78,7 @@ pub mod sorted_map;
7778
pub mod stable_hasher;
7879
mod atomic_ref;
7980
pub mod fingerprint;
81+
pub mod marker;
8082
pub mod profiling;
8183
pub mod sharded;
8284
pub mod stack;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,257 @@
1+
cfg_if!(
2+
if #[cfg(not(parallel_compiler))] {
3+
pub auto trait DynSend {}
4+
pub auto trait DynSync {}
5+
6+
impl<T> DynSend for T {}
7+
impl<T> DynSync for T {}
8+
} else {
9+
#[rustc_on_unimplemented(
10+
message = "`{Self}` doesn't implement `DynSend`. \
11+
Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`"
12+
)]
13+
// This is an auto trait for types which can be sent across threads if `sync::is_dyn_thread_safe()`
14+
// is true. These types can be wrapped in a `FromDyn` to get a `Send` type. Wrapping a
15+
// `Send` type in `IntoDynSyncSend` will create a `DynSend` type.
16+
pub unsafe auto trait DynSend {}
17+
18+
#[rustc_on_unimplemented(
19+
message = "`{Self}` doesn't implement `DynSync`. \
20+
Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Sync`"
21+
)]
22+
// This is an auto trait for types which can be shared across threads if `sync::is_dyn_thread_safe()`
23+
// is true. These types can be wrapped in a `FromDyn` to get a `Sync` type. Wrapping a
24+
// `Sync` type in `IntoDynSyncSend` will create a `DynSync` type.
25+
pub unsafe auto trait DynSync {}
26+
27+
// Same with `Sync` and `Send`.
28+
unsafe impl<T: DynSync + ?Sized> DynSend for &T {}
29+
30+
macro_rules! impls_dyn_send_neg {
31+
($([$t1: ty $(where $($generics1: tt)*)?])*) => {
32+
$(impl$(<$($generics1)*>)? !DynSend for $t1 {})*
33+
};
34+
}
35+
36+
// Consistent with `std`
37+
impls_dyn_send_neg!(
38+
[std::env::Args]
39+
[std::env::ArgsOs]
40+
[*const T where T: ?Sized]
41+
[*mut T where T: ?Sized]
42+
[std::ptr::NonNull<T> where T: ?Sized]
43+
[std::rc::Rc<T> where T: ?Sized]
44+
[std::rc::Weak<T> where T: ?Sized]
45+
[std::sync::MutexGuard<'_, T> where T: ?Sized]
46+
[std::sync::RwLockReadGuard<'_, T> where T: ?Sized]
47+
[std::sync::RwLockWriteGuard<'_, T> where T: ?Sized]
48+
[std::io::StdoutLock<'_>]
49+
[std::io::StderrLock<'_>]
50+
);
51+
cfg_if!(
52+
// Consistent with `std`
53+
// `os_imp::Env` is `!Send` in these platforms
54+
if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
55+
impl !DynSend for std::env::VarsOs {}
56+
}
57+
);
58+
59+
macro_rules! already_send {
60+
($([$ty: ty])*) => {
61+
$(unsafe impl DynSend for $ty where $ty: Send {})*
62+
};
63+
}
64+
65+
// These structures are already `Send`.
66+
already_send!(
67+
[std::backtrace::Backtrace]
68+
[std::io::Stdout]
69+
[std::io::Stderr]
70+
[std::io::Error]
71+
[std::fs::File]
72+
[rustc_arena::DroplessArena]
73+
[crate::memmap::Mmap]
74+
[crate::profiling::SelfProfiler]
75+
[crate::owned_slice::OwnedSlice]
76+
);
77+
78+
macro_rules! impl_dyn_send {
79+
($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
80+
$(unsafe impl<$($generics2)*> DynSend for $ty {})*
81+
};
82+
}
83+
84+
impl_dyn_send!(
85+
[std::sync::atomic::AtomicPtr<T> where T]
86+
[std::sync::Mutex<T> where T: ?Sized+ DynSend]
87+
[std::sync::mpsc::Sender<T> where T: DynSend]
88+
[std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
89+
[std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
90+
[std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
91+
[std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
92+
[std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
93+
[Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
94+
[Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
95+
[crate::sync::Lock<T> where T: DynSend]
96+
[crate::sync::RwLock<T> where T: DynSend]
97+
[crate::tagged_ptr::CopyTaggedPtr<P, T, CP> where P: Send + crate::tagged_ptr::Pointer, T: Send + crate::tagged_ptr::Tag, const CP: bool]
98+
[rustc_arena::TypedArena<T> where T: DynSend]
99+
[indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
100+
[indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
101+
[thin_vec::ThinVec<T> where T: DynSend]
102+
[smallvec::SmallVec<A> where A: smallvec::Array + DynSend]
103+
);
104+
105+
macro_rules! impls_dyn_sync_neg {
106+
($([$t1: ty $(where $($generics1: tt)*)?])*) => {
107+
$(impl$(<$($generics1)*>)? !DynSync for $t1 {})*
108+
};
109+
}
110+
111+
// Consistent with `std`
112+
impls_dyn_sync_neg!(
113+
[std::env::Args]
114+
[std::env::ArgsOs]
115+
[*const T where T: ?Sized]
116+
[*mut T where T: ?Sized]
117+
[std::cell::Cell<T> where T: ?Sized]
118+
[std::cell::RefCell<T> where T: ?Sized]
119+
[std::cell::UnsafeCell<T> where T: ?Sized]
120+
[std::ptr::NonNull<T> where T: ?Sized]
121+
[std::rc::Rc<T> where T: ?Sized]
122+
[std::rc::Weak<T> where T: ?Sized]
123+
[std::cell::OnceCell<T> where T]
124+
[std::sync::mpsc::Receiver<T> where T]
125+
[std::sync::mpsc::Sender<T> where T]
126+
);
127+
cfg_if!(
128+
// Consistent with `std`
129+
// `os_imp::Env` is `!Sync` in these platforms
130+
if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
131+
impl !DynSync for std::env::VarsOs {}
132+
}
133+
);
134+
135+
macro_rules! already_sync {
136+
($([$ty: ty])*) => {
137+
$(unsafe impl DynSync for $ty where $ty: Sync {})*
138+
};
139+
}
140+
141+
// These structures are already `Sync`.
142+
already_sync!(
143+
[std::sync::atomic::AtomicBool]
144+
[std::sync::atomic::AtomicUsize]
145+
[std::sync::atomic::AtomicU8]
146+
[std::sync::atomic::AtomicU32]
147+
[std::sync::atomic::AtomicU64]
148+
[std::backtrace::Backtrace]
149+
[std::io::Error]
150+
[std::fs::File]
151+
[jobserver_crate::Client]
152+
[crate::memmap::Mmap]
153+
[crate::profiling::SelfProfiler]
154+
[crate::owned_slice::OwnedSlice]
155+
);
156+
157+
macro_rules! impl_dyn_sync {
158+
($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
159+
$(unsafe impl<$($generics2)*> DynSync for $ty {})*
160+
};
161+
}
162+
163+
impl_dyn_sync!(
164+
[std::sync::atomic::AtomicPtr<T> where T]
165+
[std::sync::OnceLock<T> where T: DynSend + DynSync]
166+
[std::sync::Mutex<T> where T: ?Sized + DynSend]
167+
[std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
168+
[std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
169+
[std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
170+
[std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
171+
[std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
172+
[Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
173+
[Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]
174+
[crate::sync::Lock<T> where T: DynSend]
175+
[crate::sync::RwLock<T> where T: DynSend + DynSync]
176+
[crate::sync::OneThread<T> where T]
177+
[crate::sync::WorkerLocal<T> where T: DynSend]
178+
[crate::intern::Interned<'a, T> where 'a, T: DynSync]
179+
[crate::tagged_ptr::CopyTaggedPtr<P, T, CP> where P: Sync + crate::tagged_ptr::Pointer, T: Sync + crate::tagged_ptr::Tag, const CP: bool]
180+
[parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
181+
[parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
182+
[indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
183+
[indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
184+
[smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
185+
[thin_vec::ThinVec<T> where T: DynSync]
186+
);
187+
}
188+
);
189+
190+
pub fn assert_dyn_sync<T: ?Sized + DynSync>() {}
191+
pub fn assert_dyn_send<T: ?Sized + DynSend>() {}
192+
pub fn assert_dyn_send_val<T: ?Sized + DynSend>(_t: &T) {}
193+
pub fn assert_dyn_send_sync_val<T: ?Sized + DynSync + DynSend>(_t: &T) {}
194+
195+
#[derive(Copy, Clone)]
196+
pub struct FromDyn<T>(T);
197+
198+
impl<T> FromDyn<T> {
199+
#[inline(always)]
200+
pub fn from(val: T) -> Self {
201+
// Check that `sync::is_dyn_thread_safe()` is true on creation so we can
202+
// implement `Send` and `Sync` for this structure when `T`
203+
// implements `DynSend` and `DynSync` respectively.
204+
#[cfg(parallel_compiler)]
205+
assert!(crate::sync::is_dyn_thread_safe());
206+
FromDyn(val)
207+
}
208+
209+
#[inline(always)]
210+
pub fn into_inner(self) -> T {
211+
self.0
212+
}
213+
}
214+
215+
// `FromDyn` is `Send` if `T` is `DynSend`, since it ensures that sync::is_dyn_thread_safe() is true.
216+
#[cfg(parallel_compiler)]
217+
unsafe impl<T: DynSend> Send for FromDyn<T> {}
218+
219+
// `FromDyn` is `Sync` if `T` is `DynSync`, since it ensures that sync::is_dyn_thread_safe() is true.
220+
#[cfg(parallel_compiler)]
221+
unsafe impl<T: DynSync> Sync for FromDyn<T> {}
222+
223+
impl<T> std::ops::Deref for FromDyn<T> {
224+
type Target = T;
225+
226+
#[inline(always)]
227+
fn deref(&self) -> &Self::Target {
228+
&self.0
229+
}
230+
}
231+
232+
// A wrapper to convert a struct that is already a `Send` or `Sync` into
233+
// an instance of `DynSend` and `DynSync`, since the compiler cannot infer
234+
// it automatically in some cases. (e.g. Box<dyn Send / Sync>)
235+
#[derive(Copy, Clone)]
236+
pub struct IntoDynSyncSend<T: ?Sized>(pub T);
237+
238+
#[cfg(parallel_compiler)]
239+
unsafe impl<T: ?Sized + Send> DynSend for IntoDynSyncSend<T> {}
240+
#[cfg(parallel_compiler)]
241+
unsafe impl<T: ?Sized + Sync> DynSync for IntoDynSyncSend<T> {}
242+
243+
impl<T> std::ops::Deref for IntoDynSyncSend<T> {
244+
type Target = T;
245+
246+
#[inline(always)]
247+
fn deref(&self) -> &T {
248+
&self.0
249+
}
250+
}
251+
252+
impl<T> std::ops::DerefMut for IntoDynSyncSend<T> {
253+
#[inline(always)]
254+
fn deref_mut(&mut self) -> &mut T {
255+
&mut self.0
256+
}
257+
}

compiler/rustc_data_structures/src/owned_slice/tests.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -69,6 +69,6 @@ fn drop_drops() {
6969

7070
#[test]
7171
fn send_sync() {
72-
crate::sync::assert_send::<OwnedSlice>();
73-
crate::sync::assert_sync::<OwnedSlice>();
72+
crate::sync::assert_dyn_send::<OwnedSlice>();
73+
crate::sync::assert_dyn_sync::<OwnedSlice>();
7474
}

0 commit comments

Comments (0)