1
- use crate :: cell:: UnsafeCell ;
1
+ use crate :: cell:: Cell ;
2
2
use crate :: mem;
3
3
use crate :: mem:: MaybeUninit ;
4
4
use crate :: sync:: atomic:: { AtomicU32 , Ordering } ;
@@ -50,28 +50,23 @@ impl Mutex {
50
50
}
51
51
52
52
/// A reentrant (recursive) mutex backed by a single CloudABI lock word.
///
/// `lock` holds the CloudABI lock value: `abi::LOCK_UNLOCKED` when free, or
/// the owning thread id ORed with `abi::LOCK_WRLOCKED` when held (see the
/// compare_exchange in `try_lock`). `recursion` counts how many *extra*
/// times the owning thread has re-acquired the lock; it is only read and
/// written by the thread that currently holds the lock, so a plain `Cell`
/// suffices — no atomics or `UnsafeCell`-of-`MaybeUninit` needed.
pub struct ReentrantMutex {
    lock: AtomicU32,
    recursion: Cell<u32>,
}

// SAFETY: the atomic `lock` word serializes all cross-thread access;
// `recursion` is only touched while the lock is held, so sharing the
// structure between threads is sound even though `Cell` itself is not Sync.
unsafe impl Send for ReentrantMutex {}
unsafe impl Sync for ReentrantMutex {}
57
60
impl ReentrantMutex {
58
61
pub const unsafe fn uninitialized ( ) -> ReentrantMutex {
59
- ReentrantMutex {
60
- lock : UnsafeCell :: new ( MaybeUninit :: uninit ( ) ) ,
61
- recursion : UnsafeCell :: new ( MaybeUninit :: uninit ( ) ) ,
62
- }
62
+ ReentrantMutex { lock : AtomicU32 :: new ( abi:: LOCK_UNLOCKED . 0 ) , recursion : Cell :: new ( 0 ) }
63
63
}
64
64
65
- pub unsafe fn init ( & self ) {
66
- * self . lock . get ( ) = MaybeUninit :: new ( AtomicU32 :: new ( abi:: LOCK_UNLOCKED . 0 ) ) ;
67
- * self . recursion . get ( ) = MaybeUninit :: new ( 0 ) ;
68
- }
65
+ pub unsafe fn init ( & self ) { }
69
66
70
67
pub unsafe fn try_lock ( & self ) -> bool {
71
68
// Attempt to acquire the lock.
72
- let lock = ( * self . lock . get ( ) ) . as_mut_ptr ( ) ;
73
- let recursion = ( * self . recursion . get ( ) ) . as_mut_ptr ( ) ;
74
- if let Err ( old) = ( * lock) . compare_exchange (
69
+ if let Err ( old) = self . lock . compare_exchange (
75
70
abi:: LOCK_UNLOCKED . 0 ,
76
71
__pthread_thread_id. 0 | abi:: LOCK_WRLOCKED . 0 ,
77
72
Ordering :: Acquire ,
@@ -80,22 +75,22 @@ impl ReentrantMutex {
80
75
// If we fail to acquire the lock, it may be the case
81
76
// that we've already acquired it and may need to recurse.
82
77
if old & !abi:: LOCK_KERNEL_MANAGED . 0 == __pthread_thread_id. 0 | abi:: LOCK_WRLOCKED . 0 {
83
- * recursion += 1 ;
78
+ self . recursion . set ( self . recursion . get ( ) + 1 ) ;
84
79
true
85
80
} else {
86
81
false
87
82
}
88
83
} else {
89
84
// Success.
90
- assert_eq ! ( * recursion, 0 , "Mutex has invalid recursion count" ) ;
85
+ assert_eq ! ( self . recursion. get ( ) , 0 , "Mutex has invalid recursion count" ) ;
91
86
true
92
87
}
93
88
}
94
89
95
90
pub unsafe fn lock ( & self ) {
96
91
if !self . try_lock ( ) {
97
92
// Call into the kernel to acquire a write lock.
98
- let lock = self . lock . get ( ) ;
93
+ let lock = & self . lock as * const AtomicU32 ;
99
94
let subscription = abi:: subscription {
100
95
type_ : abi:: eventtype:: LOCK_WRLOCK ,
101
96
union : abi:: subscription_union {
@@ -116,17 +111,17 @@ impl ReentrantMutex {
116
111
}
117
112
118
113
pub unsafe fn unlock ( & self ) {
119
- let lock = ( * self . lock . get ( ) ) . as_mut_ptr ( ) ;
120
- let recursion = ( * self . recursion . get ( ) ) . as_mut_ptr ( ) ;
121
114
assert_eq ! (
122
- ( * lock) . load( Ordering :: Relaxed ) & !abi:: LOCK_KERNEL_MANAGED . 0 ,
115
+ self . lock. load( Ordering :: Relaxed ) & !abi:: LOCK_KERNEL_MANAGED . 0 ,
123
116
__pthread_thread_id. 0 | abi:: LOCK_WRLOCKED . 0 ,
124
117
"This mutex is locked by a different thread"
125
118
) ;
126
119
127
- if * recursion > 0 {
128
- * recursion -= 1 ;
129
- } else if !( * lock)
120
+ let r = self . recursion . get ( ) ;
121
+ if r > 0 {
122
+ self . recursion . set ( r - 1 ) ;
123
+ } else if !self
124
+ . lock
130
125
. compare_exchange (
131
126
__pthread_thread_id. 0 | abi:: LOCK_WRLOCKED . 0 ,
132
127
abi:: LOCK_UNLOCKED . 0 ,
@@ -137,19 +132,20 @@ impl ReentrantMutex {
137
132
{
138
133
// Lock is managed by kernelspace. Call into the kernel
139
134
// to unblock waiting threads.
140
- let ret = abi:: lock_unlock ( lock as * mut abi:: lock , abi:: scope:: PRIVATE ) ;
135
+ let ret = abi:: lock_unlock (
136
+ & self . lock as * const AtomicU32 as * mut abi:: lock ,
137
+ abi:: scope:: PRIVATE ,
138
+ ) ;
141
139
assert_eq ! ( ret, abi:: errno:: SUCCESS , "Failed to unlock a mutex" ) ;
142
140
}
143
141
}
144
142
145
143
pub unsafe fn destroy ( & self ) {
146
- let lock = ( * self . lock . get ( ) ) . as_mut_ptr ( ) ;
147
- let recursion = ( * self . recursion . get ( ) ) . as_mut_ptr ( ) ;
148
144
assert_eq ! (
149
- ( * lock) . load( Ordering :: Relaxed ) ,
145
+ self . lock. load( Ordering :: Relaxed ) ,
150
146
abi:: LOCK_UNLOCKED . 0 ,
151
147
"Attempted to destroy locked mutex"
152
148
) ;
153
- assert_eq ! ( * recursion, 0 , "Recursion counter invalid" ) ;
149
+ assert_eq ! ( self . recursion. get ( ) , 0 , "Recursion counter invalid" ) ;
154
150
}
155
151
}
0 commit comments