Commit 1016deb

Small cleanups in Windows Mutex.
- Move `held` into the boxed part, since the SRW lock implementation does not use this. This makes the Mutex 50% smaller.
- Use `Cell` instead of `UnsafeCell` for `held`, such that `.replace()` can be used.
- Add some comments.
1 parent 2d6cbd2 commit 1016deb
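
The `Cell` change is convenient because `Cell::replace` writes a new value and returns the old one in a single call, which is exactly the check-and-set the locking code needs for its `held` flag. Below is a minimal standalone sketch of that behaviour; it only exercises the standard `Cell` API and is not part of this commit.

```rust
use std::cell::Cell;

fn main() {
    // Stand-in for the `held: Cell<bool>` field in the boxed `Inner`.
    let held = Cell::new(false);

    // First lock: `replace(true)` returns the previous value (false),
    // so this is not a recursive lock.
    assert!(!held.replace(true));

    // Locking again while already held: `replace(true)` now returns true,
    // which is the condition the patch treats as a recursive lock.
    assert!(held.replace(true));

    // Unlock clears the flag again.
    held.set(false);
}
```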

File tree

1 file changed: +32 -32 lines


library/std/src/sys/windows/mutex.rs (+32 -32)
@@ -19,20 +19,25 @@
 //! CriticalSection is used and we keep track of who's holding the mutex to
 //! detect recursive locks.
 
-use crate::cell::UnsafeCell;
+use crate::cell::{Cell, UnsafeCell};
 use crate::mem::{self, MaybeUninit};
 use crate::sync::atomic::{AtomicUsize, Ordering};
 use crate::sys::c;
 use crate::sys::compat;
 
 pub struct Mutex {
+    // This is either directly an SRWLOCK (if supported), or a Box<Inner> otherwise.
     lock: AtomicUsize,
-    held: UnsafeCell<bool>,
 }
 
 unsafe impl Send for Mutex {}
 unsafe impl Sync for Mutex {}
 
+struct Inner {
+    remutex: ReentrantMutex,
+    held: Cell<bool>,
+}
+
 #[derive(Clone, Copy)]
 enum Kind {
     SRWLock = 1,
@@ -51,7 +56,6 @@ impl Mutex {
             // This works because SRWLOCK_INIT is 0 (wrapped in a struct), so we are also properly
             // initializing an SRWLOCK here.
             lock: AtomicUsize::new(0),
-            held: UnsafeCell::new(false),
         }
     }
     #[inline]
@@ -60,10 +64,11 @@ impl Mutex {
         match kind() {
             Kind::SRWLock => c::AcquireSRWLockExclusive(raw(self)),
             Kind::CriticalSection => {
-                let re = self.remutex();
-                (*re).lock();
-                if !self.flag_locked() {
-                    (*re).unlock();
+                let inner = &mut *self.inner();
+                inner.remutex.lock();
+                if inner.held.replace(true) {
+                    // It was already locked, so we got a recursive lock which we do not want.
+                    inner.remutex.unlock();
                     panic!("cannot recursively lock a mutex");
                 }
             }
@@ -73,23 +78,27 @@ impl Mutex {
         match kind() {
             Kind::SRWLock => c::TryAcquireSRWLockExclusive(raw(self)) != 0,
             Kind::CriticalSection => {
-                let re = self.remutex();
-                if !(*re).try_lock() {
+                let inner = &mut *self.inner();
+                if !inner.remutex.try_lock() {
                     false
-                } else if self.flag_locked() {
-                    true
-                } else {
-                    (*re).unlock();
+                } else if inner.held.replace(true) {
+                    // It was already locked, so we got a recursive lock which we do not want.
+                    inner.remutex.unlock();
                     false
+                } else {
+                    true
                 }
             }
         }
     }
     pub unsafe fn unlock(&self) {
-        *self.held.get() = false;
         match kind() {
             Kind::SRWLock => c::ReleaseSRWLockExclusive(raw(self)),
-            Kind::CriticalSection => (*self.remutex()).unlock(),
+            Kind::CriticalSection => {
+                let inner = &mut *(self.lock.load(Ordering::SeqCst) as *mut Inner);
+                inner.held.set(false);
+                inner.remutex.unlock();
+            }
         }
     }
     pub unsafe fn destroy(&self) {
@@ -98,37 +107,28 @@ impl Mutex {
             Kind::CriticalSection => match self.lock.load(Ordering::SeqCst) {
                 0 => {}
                 n => {
-                    Box::from_raw(n as *mut ReentrantMutex).destroy();
+                    Box::from_raw(n as *mut Inner).remutex.destroy();
                 }
             },
         }
     }
 
-    unsafe fn remutex(&self) -> *mut ReentrantMutex {
+    unsafe fn inner(&self) -> *mut Inner {
         match self.lock.load(Ordering::SeqCst) {
             0 => {}
             n => return n as *mut _,
        }
-        let re = box ReentrantMutex::uninitialized();
-        re.init();
-        let re = Box::into_raw(re);
-        match self.lock.compare_and_swap(0, re as usize, Ordering::SeqCst) {
-            0 => re,
+        let inner = box Inner { remutex: ReentrantMutex::uninitialized(), held: Cell::new(false) };
+        inner.remutex.init();
+        let inner = Box::into_raw(inner);
+        match self.lock.compare_and_swap(0, inner as usize, Ordering::SeqCst) {
+            0 => inner,
             n => {
-                Box::from_raw(re).destroy();
+                Box::from_raw(inner).remutex.destroy();
                 n as *mut _
            }
        }
    }
-
-    unsafe fn flag_locked(&self) -> bool {
-        if *self.held.get() {
-            false
-        } else {
-            *self.held.get() = true;
-            true
-        }
-    }
 }
 
 fn kind() -> Kind {