cfg_if!(
    if #[cfg(not(parallel_compiler))] {
        pub auto trait DynSend {}
        pub auto trait DynSync {}

        impl<T> DynSend for T {}
        impl<T> DynSync for T {}
    } else {
        #[rustc_on_unimplemented(
            message = "`{Self}` doesn't implement `DynSend`. \
            Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Send`"
        )]
        // This is an auto trait for types which can be sent across threads if `sync::is_dyn_thread_safe()`
        // is true. These types can be wrapped in a `FromDyn` to get a `Send` type. Wrapping a
        // `Send` type in `IntoDynSyncSend` will create a `DynSend` type.
        pub unsafe auto trait DynSend {}

        #[rustc_on_unimplemented(
            message = "`{Self}` doesn't implement `DynSync`. \
            Add it to `rustc_data_structures::marker` or use `IntoDynSyncSend` if it's already `Sync`"
        )]
        // This is an auto trait for types which can be shared across threads if `sync::is_dyn_thread_safe()`
        // is true. These types can be wrapped in a `FromDyn` to get a `Sync` type. Wrapping a
        // `Sync` type in `IntoDynSyncSend` will create a `DynSync` type.
        pub unsafe auto trait DynSync {}

        // Same relationship as `Send` and `Sync` in `std`: `&T` is `DynSend`
        // when `T` is `DynSync`.
        unsafe impl<T: DynSync + ?Sized> DynSend for &T {}

        macro_rules! impls_dyn_send_neg {
            ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
                $(impl$(<$($generics1)*>)? !DynSend for $t1 {})*
            };
        }

        // Consistent with `std`
        impls_dyn_send_neg!(
            [std::env::Args]
            [std::env::ArgsOs]
            [*const T where T: ?Sized]
            [*mut T where T: ?Sized]
            [std::ptr::NonNull<T> where T: ?Sized]
            [std::rc::Rc<T> where T: ?Sized]
            [std::rc::Weak<T> where T: ?Sized]
            [std::sync::MutexGuard<'_, T> where T: ?Sized]
            [std::sync::RwLockReadGuard<'_, T> where T: ?Sized]
            [std::sync::RwLockWriteGuard<'_, T> where T: ?Sized]
            [std::io::StdoutLock<'_>]
            [std::io::StderrLock<'_>]
        );
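        // Illustrative expansion (a sketch, not additional code): the optional
        // `where` tokens become the generic parameters of the negative impl, so
        // `[*mut T where T: ?Sized]` expands to
        //
        //     impl<T: ?Sized> !DynSend for *mut T {}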
        cfg_if!(
            // Consistent with `std`: `os_imp::Env` is `!Send` on these platforms.
            if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
                impl !DynSend for std::env::VarsOs {}
            }
        );

        macro_rules! already_send {
            ($([$ty: ty])*) => {
                $(unsafe impl DynSend for $ty where $ty: Send {})*
            };
        }
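        // Illustrative expansion (a sketch of one entry below):
        // `already_send!([std::fs::File])` produces
        //
        //     unsafe impl DynSend for std::fs::File where std::fs::File: Send {}
        //
        // The trivial `where $ty: Send` bound keeps the unsafe impl honest: it only
        // applies when the listed type really is `Send`.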

        // These structures are already `Send`.
        already_send!(
            [std::backtrace::Backtrace]
            [std::io::Stdout]
            [std::io::Stderr]
            [std::io::Error]
            [std::fs::File]
            [rustc_arena::DroplessArena]
            [crate::memmap::Mmap]
            [crate::profiling::SelfProfiler]
            [crate::owned_slice::OwnedSlice]
        );

        macro_rules! impl_dyn_send {
            ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
                $(unsafe impl<$($generics2)*> DynSend for $ty {})*
            };
        }
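        // Illustrative expansion (a sketch, not additional code): the tokens after
        // `where` become the generic parameter list of the impl, so
        // `[Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]` expands to
        //
        //     unsafe impl<T: DynSend, A: std::alloc::Allocator + DynSend> DynSend for Vec<T, A> {}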

        impl_dyn_send!(
            [std::sync::atomic::AtomicPtr<T> where T]
            [std::sync::Mutex<T> where T: ?Sized + DynSend]
            [std::sync::mpsc::Sender<T> where T: DynSend]
            [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
            [std::sync::LazyLock<T, F> where T: DynSend, F: DynSend]
            [std::collections::HashSet<K, S> where K: DynSend, S: DynSend]
            [std::collections::HashMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
            [std::collections::BTreeMap<K, V, A> where K: DynSend, V: DynSend, A: std::alloc::Allocator + Clone + DynSend]
            [Vec<T, A> where T: DynSend, A: std::alloc::Allocator + DynSend]
            [Box<T, A> where T: ?Sized + DynSend, A: std::alloc::Allocator + DynSend]
            [crate::sync::Lock<T> where T: DynSend]
            [crate::sync::RwLock<T> where T: DynSend]
            [crate::tagged_ptr::CopyTaggedPtr<P, T, CP> where P: Send + crate::tagged_ptr::Pointer, T: Send + crate::tagged_ptr::Tag, const CP: bool]
            [rustc_arena::TypedArena<T> where T: DynSend]
            [indexmap::IndexSet<V, S> where V: DynSend, S: DynSend]
            [indexmap::IndexMap<K, V, S> where K: DynSend, V: DynSend, S: DynSend]
            [thin_vec::ThinVec<T> where T: DynSend]
            [smallvec::SmallVec<A> where A: smallvec::Array + DynSend]
        );

        macro_rules! impls_dyn_sync_neg {
            ($([$t1: ty $(where $($generics1: tt)*)?])*) => {
                $(impl$(<$($generics1)*>)? !DynSync for $t1 {})*
            };
        }

        // Consistent with `std`
        impls_dyn_sync_neg!(
            [std::env::Args]
            [std::env::ArgsOs]
            [*const T where T: ?Sized]
            [*mut T where T: ?Sized]
            [std::cell::Cell<T> where T: ?Sized]
            [std::cell::RefCell<T> where T: ?Sized]
            [std::cell::UnsafeCell<T> where T: ?Sized]
            [std::ptr::NonNull<T> where T: ?Sized]
            [std::rc::Rc<T> where T: ?Sized]
            [std::rc::Weak<T> where T: ?Sized]
            [std::cell::OnceCell<T> where T]
            [std::sync::mpsc::Receiver<T> where T]
            [std::sync::mpsc::Sender<T> where T]
        );
        cfg_if!(
            // Consistent with `std`: `os_imp::Env` is `!Sync` on these platforms.
            if #[cfg(any(unix, target_os = "hermit", target_os = "wasi", target_os = "solid_asp3"))] {
                impl !DynSync for std::env::VarsOs {}
            }
        );

        macro_rules! already_sync {
            ($([$ty: ty])*) => {
                $(unsafe impl DynSync for $ty where $ty: Sync {})*
            };
        }

        // These structures are already `Sync`.
        already_sync!(
            [std::sync::atomic::AtomicBool]
            [std::sync::atomic::AtomicUsize]
            [std::sync::atomic::AtomicU8]
            [std::sync::atomic::AtomicU32]
            [std::sync::atomic::AtomicU64]
            [std::backtrace::Backtrace]
            [std::io::Error]
            [std::fs::File]
            [jobserver_crate::Client]
            [crate::memmap::Mmap]
            [crate::profiling::SelfProfiler]
            [crate::owned_slice::OwnedSlice]
        );

        macro_rules! impl_dyn_sync {
            ($($($attr: meta)* [$ty: ty where $($generics2: tt)*])*) => {
                $(unsafe impl<$($generics2)*> DynSync for $ty {})*
            };
        }

        impl_dyn_sync!(
            [std::sync::atomic::AtomicPtr<T> where T]
            [std::sync::OnceLock<T> where T: DynSend + DynSync]
            [std::sync::Mutex<T> where T: ?Sized + DynSend]
            [std::sync::Arc<T> where T: ?Sized + DynSync + DynSend]
            [std::sync::LazyLock<T, F> where T: DynSend + DynSync, F: DynSend]
            [std::collections::HashSet<K, S> where K: DynSync, S: DynSync]
            [std::collections::HashMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
            [std::collections::BTreeMap<K, V, A> where K: DynSync, V: DynSync, A: std::alloc::Allocator + Clone + DynSync]
            [Vec<T, A> where T: DynSync, A: std::alloc::Allocator + DynSync]
            [Box<T, A> where T: ?Sized + DynSync, A: std::alloc::Allocator + DynSync]
            [crate::sync::Lock<T> where T: DynSend]
            [crate::sync::RwLock<T> where T: DynSend + DynSync]
            [crate::sync::OneThread<T> where T]
            [crate::sync::WorkerLocal<T> where T: DynSend]
            [crate::intern::Interned<'a, T> where 'a, T: DynSync]
            [crate::tagged_ptr::CopyTaggedPtr<P, T, CP> where P: Sync + crate::tagged_ptr::Pointer, T: Sync + crate::tagged_ptr::Tag, const CP: bool]
            [parking_lot::lock_api::Mutex<R, T> where R: DynSync, T: ?Sized + DynSend]
            [parking_lot::lock_api::RwLock<R, T> where R: DynSync, T: ?Sized + DynSend + DynSync]
            [indexmap::IndexSet<V, S> where V: DynSync, S: DynSync]
            [indexmap::IndexMap<K, V, S> where K: DynSync, V: DynSync, S: DynSync]
            [smallvec::SmallVec<A> where A: smallvec::Array + DynSync]
            [thin_vec::ThinVec<T> where T: DynSync]
        );
    }
);

pub fn assert_dyn_sync<T: ?Sized + DynSync>() {}
pub fn assert_dyn_send<T: ?Sized + DynSend>() {}
pub fn assert_dyn_send_val<T: ?Sized + DynSend>(_t: &T) {}
pub fn assert_dyn_send_sync_val<T: ?Sized + DynSync + DynSend>(_t: &T) {}
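// Illustrative usage (hypothetical type and value names): these helpers are
// zero-sized compile-time checks; a call fails to compile, with the
// `rustc_on_unimplemented` message above, when the bound does not hold.
//
//     fn _check_thread_safety(shared: &MySharedTable) {
//         assert_dyn_sync::<MySharedTable>();   // type-based check
//         assert_dyn_send_val(shared);          // value-based variant
//     }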

#[derive(Copy, Clone)]
pub struct FromDyn<T>(T);

impl<T> FromDyn<T> {
    #[inline(always)]
    pub fn from(val: T) -> Self {
        // Check that `sync::is_dyn_thread_safe()` is true on creation, so that
        // `Send` and `Sync` can be implemented for this structure when `T`
        // implements `DynSend` and `DynSync` respectively.
        #[cfg(parallel_compiler)]
        assert!(crate::sync::is_dyn_thread_safe());
        FromDyn(val)
    }

    #[inline(always)]
    pub fn into_inner(self) -> T {
        self.0
    }
}

// `FromDyn` is `Send` if `T` is `DynSend`, since constructing it asserts that
// `sync::is_dyn_thread_safe()` is true.
#[cfg(parallel_compiler)]
unsafe impl<T: DynSend> Send for FromDyn<T> {}

// `FromDyn` is `Sync` if `T` is `DynSync`, since constructing it asserts that
// `sync::is_dyn_thread_safe()` is true.
#[cfg(parallel_compiler)]
unsafe impl<T: DynSync> Sync for FromDyn<T> {}

impl<T> std::ops::Deref for FromDyn<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
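// Illustrative usage sketch (hypothetical names; assumes the parallel compiler
// with dynamic thread safety enabled, so `is_dyn_thread_safe()` holds): `FromDyn`
// moves `DynSend` data into a scoped task and hands it back afterwards.
//
//     let wrapped = FromDyn::from(dyn_send_value); // asserts is_dyn_thread_safe()
//     std::thread::scope(|s| {
//         // `FromDyn<T>: Send` because `T: DynSend`, so the closure may move it.
//         s.spawn(move || consume(wrapped.into_inner()));
//     });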

// A wrapper to convert a type that is already `Send` and/or `Sync` into an
// instance of `DynSend` and/or `DynSync`, since the compiler cannot infer this
// automatically in some cases (e.g. `Box<dyn Send>` / `Box<dyn Sync>`).
#[derive(Copy, Clone)]
pub struct IntoDynSyncSend<T: ?Sized>(pub T);

#[cfg(parallel_compiler)]
unsafe impl<T: ?Sized + Send> DynSend for IntoDynSyncSend<T> {}
#[cfg(parallel_compiler)]
unsafe impl<T: ?Sized + Sync> DynSync for IntoDynSyncSend<T> {}

impl<T> std::ops::Deref for IntoDynSyncSend<T> {
    type Target = T;

    #[inline(always)]
    fn deref(&self) -> &T {
        &self.0
    }
}

impl<T> std::ops::DerefMut for IntoDynSyncSend<T> {
    #[inline(always)]
    fn deref_mut(&mut self) -> &mut T {
        &mut self.0
    }
}
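// Illustrative usage sketch (hypothetical struct and field names): a boxed trait
// object that is `Send` but has no direct `DynSend` impl in this module can still
// satisfy the auto trait by being wrapped.
//
//     struct Diagnostics {
//         emitter: IntoDynSyncSend<Box<dyn std::io::Write + Send>>,
//     }
//     // Since `DynSend` is an auto trait and the wrapped field is `DynSend`,
//     // `Diagnostics` is `DynSend` as well.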