20
20
//! It internally uses `parking_lot::RwLock` if cfg!(parallel_queries) is true,
21
21
//! `RefCell` otherwise.
22
22
//!
23
- //! `LockCell` is a thread safe version of `Cell`, with `set` and `get` operations.
24
- //! It can never deadlock. It uses `Cell` when
25
- //! cfg!(parallel_queries) is false, otherwise it is a `Lock`.
26
- //!
27
23
//! `MTLock` is a mutex which disappears if cfg!(parallel_queries) is false.
28
24
//!
29
25
//! `MTRef` is a immutable reference if cfg!(parallel_queries), and an mutable reference otherwise.
33
29
34
30
use std:: collections:: HashMap ;
35
31
use std:: hash:: { Hash , BuildHasher } ;
36
- use std:: cmp:: Ordering ;
37
32
use std:: marker:: PhantomData ;
38
- use std:: fmt:: Debug ;
39
- use std:: fmt:: Formatter ;
40
- use std:: fmt;
41
33
use std:: ops:: { Deref , DerefMut } ;
42
34
use owning_ref:: { Erased , OwningRef } ;
43
35
@@ -64,6 +56,9 @@ pub fn serial_scope<F, R>(f: F) -> R
64
56
f ( & SerialScope )
65
57
}
66
58
59
+ pub use std:: sync:: atomic:: Ordering :: SeqCst ;
60
+ pub use std:: sync:: atomic:: Ordering ;
61
+
67
62
cfg_if ! {
68
63
if #[ cfg( not( parallel_queries) ) ] {
69
64
pub auto trait Send { }
@@ -79,6 +74,62 @@ cfg_if! {
79
74
}
80
75
}
81
76
77
+ use std:: ops:: Add ;
78
+
79
+ #[ derive( Debug ) ]
80
+ pub struct Atomic <T : Copy >( Cell <T >) ;
81
+
82
+ impl <T : Copy > Atomic <T > {
83
+ pub fn new( v: T ) -> Self {
84
+ Atomic ( Cell :: new( v) )
85
+ }
86
+ }
87
+
88
+ impl <T : Copy + PartialEq > Atomic <T > {
89
+ pub fn into_inner( self ) -> T {
90
+ self . 0 . into_inner( )
91
+ }
92
+
93
+ pub fn load( & self , _: Ordering ) -> T {
94
+ self . 0 . get( )
95
+ }
96
+
97
+ pub fn store( & self , val: T , _: Ordering ) {
98
+ self . 0 . set( val)
99
+ }
100
+
101
+ pub fn swap( & self , val: T , _: Ordering ) -> T {
102
+ self . 0 . replace( val)
103
+ }
104
+
105
+ pub fn compare_exchange( & self ,
106
+ current: T ,
107
+ new: T ,
108
+ _: Ordering ,
109
+ _: Ordering )
110
+ -> Result <T , T > {
111
+ let read = self . 0 . get( ) ;
112
+ if read == current {
113
+ self . 0 . set( new) ;
114
+ Ok ( read)
115
+ } else {
116
+ Err ( read)
117
+ }
118
+ }
119
+ }
120
+
121
+ impl <T : Add <Output =T > + Copy > Atomic <T > {
122
+ pub fn fetch_add( & self , val: T , _: Ordering ) -> T {
123
+ let old = self . 0 . get( ) ;
124
+ self . 0 . set( old + val) ;
125
+ old
126
+ }
127
+ }
128
+
129
+ pub type AtomicUsize = Atomic <usize >;
130
+ pub type AtomicBool = Atomic <bool >;
131
+ pub type AtomicU64 = Atomic <u64 >;
132
+
82
133
pub use self :: serial_join as join;
83
134
pub use self :: serial_scope as scope;
84
135
@@ -170,47 +221,6 @@ cfg_if! {
170
221
MTLock ( self . 0 . clone( ) )
171
222
}
172
223
}
173
-
174
- pub struct LockCell <T >( Cell <T >) ;
175
-
176
- impl <T > LockCell <T > {
177
- #[ inline( always) ]
178
- pub fn new( inner: T ) -> Self {
179
- LockCell ( Cell :: new( inner) )
180
- }
181
-
182
- #[ inline( always) ]
183
- pub fn into_inner( self ) -> T {
184
- self . 0 . into_inner( )
185
- }
186
-
187
- #[ inline( always) ]
188
- pub fn set( & self , new_inner: T ) {
189
- self . 0 . set( new_inner) ;
190
- }
191
-
192
- #[ inline( always) ]
193
- pub fn get( & self ) -> T where T : Copy {
194
- self . 0 . get( )
195
- }
196
-
197
- #[ inline( always) ]
198
- pub fn set_mut( & mut self , new_inner: T ) {
199
- self . 0 . set( new_inner) ;
200
- }
201
-
202
- #[ inline( always) ]
203
- pub fn get_mut( & mut self ) -> T where T : Copy {
204
- self . 0 . get( )
205
- }
206
- }
207
-
208
- impl <T > LockCell <Option <T >> {
209
- #[ inline( always) ]
210
- pub fn take( & self ) -> Option <T > {
211
- unsafe { ( * self . 0 . as_ptr( ) ) . take( ) }
212
- }
213
- }
214
224
} else {
215
225
pub use std:: marker:: Send as Send ;
216
226
pub use std:: marker:: Sync as Sync ;
@@ -223,6 +233,8 @@ cfg_if! {
223
233
pub use parking_lot:: MutexGuard as LockGuard ;
224
234
pub use parking_lot:: MappedMutexGuard as MappedLockGuard ;
225
235
236
+ pub use std:: sync:: atomic:: { AtomicBool , AtomicUsize , AtomicU64 } ;
237
+
226
238
pub use std:: sync:: Arc as Lrc ;
227
239
pub use std:: sync:: Weak as Weak ;
228
240
@@ -288,47 +300,6 @@ cfg_if! {
288
300
v. erase_send_sync_owner( )
289
301
} }
290
302
}
291
-
292
- pub struct LockCell <T >( Lock <T >) ;
293
-
294
- impl <T > LockCell <T > {
295
- #[ inline( always) ]
296
- pub fn new( inner: T ) -> Self {
297
- LockCell ( Lock :: new( inner) )
298
- }
299
-
300
- #[ inline( always) ]
301
- pub fn into_inner( self ) -> T {
302
- self . 0 . into_inner( )
303
- }
304
-
305
- #[ inline( always) ]
306
- pub fn set( & self , new_inner: T ) {
307
- * self . 0 . lock( ) = new_inner;
308
- }
309
-
310
- #[ inline( always) ]
311
- pub fn get( & self ) -> T where T : Copy {
312
- * self . 0 . lock( )
313
- }
314
-
315
- #[ inline( always) ]
316
- pub fn set_mut( & mut self , new_inner: T ) {
317
- * self . 0 . get_mut( ) = new_inner;
318
- }
319
-
320
- #[ inline( always) ]
321
- pub fn get_mut( & mut self ) -> T where T : Copy {
322
- * self . 0 . get_mut( )
323
- }
324
- }
325
-
326
- impl <T > LockCell <Option <T >> {
327
- #[ inline( always) ]
328
- pub fn take( & self ) -> Option <T > {
329
- self . 0 . lock( ) . take( )
330
- }
331
- }
332
303
}
333
304
}
334
305
@@ -476,65 +447,6 @@ impl<T> Once<T> {
476
447
}
477
448
}
478
449
479
- impl < T : Copy + Debug > Debug for LockCell < T > {
480
- fn fmt ( & self , f : & mut Formatter ) -> fmt:: Result {
481
- f. debug_struct ( "LockCell" )
482
- . field ( "value" , & self . get ( ) )
483
- . finish ( )
484
- }
485
- }
486
-
487
- impl < T : Default > Default for LockCell < T > {
488
- /// Creates a `LockCell<T>`, with the `Default` value for T.
489
- #[ inline]
490
- fn default ( ) -> LockCell < T > {
491
- LockCell :: new ( Default :: default ( ) )
492
- }
493
- }
494
-
495
- impl < T : PartialEq + Copy > PartialEq for LockCell < T > {
496
- #[ inline]
497
- fn eq ( & self , other : & LockCell < T > ) -> bool {
498
- self . get ( ) == other. get ( )
499
- }
500
- }
501
-
502
- impl < T : Eq + Copy > Eq for LockCell < T > { }
503
-
504
- impl < T : PartialOrd + Copy > PartialOrd for LockCell < T > {
505
- #[ inline]
506
- fn partial_cmp ( & self , other : & LockCell < T > ) -> Option < Ordering > {
507
- self . get ( ) . partial_cmp ( & other. get ( ) )
508
- }
509
-
510
- #[ inline]
511
- fn lt ( & self , other : & LockCell < T > ) -> bool {
512
- self . get ( ) < other. get ( )
513
- }
514
-
515
- #[ inline]
516
- fn le ( & self , other : & LockCell < T > ) -> bool {
517
- self . get ( ) <= other. get ( )
518
- }
519
-
520
- #[ inline]
521
- fn gt ( & self , other : & LockCell < T > ) -> bool {
522
- self . get ( ) > other. get ( )
523
- }
524
-
525
- #[ inline]
526
- fn ge ( & self , other : & LockCell < T > ) -> bool {
527
- self . get ( ) >= other. get ( )
528
- }
529
- }
530
-
531
- impl < T : Ord + Copy > Ord for LockCell < T > {
532
- #[ inline]
533
- fn cmp ( & self , other : & LockCell < T > ) -> Ordering {
534
- self . get ( ) . cmp ( & other. get ( ) )
535
- }
536
- }
537
-
538
450
#[ derive( Debug ) ]
539
451
pub struct Lock < T > ( InnerLock < T > ) ;
540
452
0 commit comments