//! It internally uses `parking_lot::RwLock` if cfg!(parallel_queries) is true,
//! `RefCell` otherwise.
//!
//! `MTLock` is a mutex which disappears if cfg!(parallel_queries) is false.
//!
//! `MTRef` is an immutable reference if cfg!(parallel_queries), and a mutable reference otherwise.
23
19
24
20
use std:: collections:: HashMap ;
25
21
use std:: hash:: { Hash , BuildHasher } ;
26
- use std:: cmp:: Ordering ;
27
22
use std:: marker:: PhantomData ;
28
- use std:: fmt:: Debug ;
29
- use std:: fmt:: Formatter ;
30
- use std:: fmt;
31
23
use std:: ops:: { Deref , DerefMut } ;
32
24
use owning_ref:: { Erased , OwningRef } ;
33
25
@@ -54,6 +46,9 @@ pub fn serial_scope<F, R>(f: F) -> R
54
46
f ( & SerialScope )
55
47
}
56
48
49
+ pub use std:: sync:: atomic:: Ordering :: SeqCst ;
50
+ pub use std:: sync:: atomic:: Ordering ;
51
+
57
52
cfg_if ! {
58
53
if #[ cfg( not( parallel_queries) ) ] {
59
54
pub auto trait Send { }
@@ -69,6 +64,62 @@ cfg_if! {
69
64
}
70
65
}
71
66
67
use std::ops::Add;

/// A drop-in stand-in for `std::sync::atomic` types, used when
/// cfg!(parallel_queries) is false and there is only one thread.
///
/// It is backed by a plain `Cell`, so every operation is non-atomic; the
/// `Ordering` arguments are accepted for API compatibility and ignored.
#[derive(Debug)]
pub struct Atomic<T: Copy>(Cell<T>);

impl<T: Copy> Atomic<T> {
    /// Creates a new `Atomic` holding `v`.
    pub fn new(v: T) -> Self {
        Atomic(Cell::new(v))
    }

    /// Consumes the wrapper and returns the contained value.
    // NOTE: only `Copy` is required here (and for load/store/swap below);
    // `PartialEq` is needed solely by `compare_exchange`.
    pub fn into_inner(self) -> T {
        self.0.into_inner()
    }

    /// Returns the current value. The ordering is ignored (single-threaded).
    pub fn load(&self, _: Ordering) -> T {
        self.0.get()
    }

    /// Sets the value to `val`. The ordering is ignored (single-threaded).
    pub fn store(&self, val: T, _: Ordering) {
        self.0.set(val)
    }

    /// Replaces the value with `val` and returns the previous value.
    pub fn swap(&self, val: T, _: Ordering) -> T {
        self.0.replace(val)
    }
}

impl<T: Copy + PartialEq> Atomic<T> {
    /// Stores `new` if the current value equals `current`.
    ///
    /// Returns `Ok(previous)` on success and `Err(actual)` on failure,
    /// mirroring `std::sync::atomic::AtomicUsize::compare_exchange`.
    /// Both orderings are ignored (single-threaded).
    pub fn compare_exchange(&self,
                            current: T,
                            new: T,
                            _: Ordering,
                            _: Ordering)
                            -> Result<T, T> {
        let read = self.0.get();
        if read == current {
            self.0.set(new);
            Ok(read)
        } else {
            Err(read)
        }
    }
}

impl<T: Add<Output = T> + Copy> Atomic<T> {
    /// Adds `val` to the current value and returns the previous value.
    pub fn fetch_add(&self, val: T, _: Ordering) -> T {
        let old = self.0.get();
        self.0.set(old + val);
        old
    }
}

pub type AtomicUsize = Atomic<usize>;
pub type AtomicBool = Atomic<bool>;
pub type AtomicU64 = Atomic<u64>;
122
+
72
123
pub use self :: serial_join as join;
73
124
pub use self :: serial_scope as scope;
74
125
@@ -160,47 +211,6 @@ cfg_if! {
160
211
MTLock ( self . 0 . clone( ) )
161
212
}
162
213
}
163
-
164
- pub struct LockCell <T >( Cell <T >) ;
165
-
166
- impl <T > LockCell <T > {
167
- #[ inline( always) ]
168
- pub fn new( inner: T ) -> Self {
169
- LockCell ( Cell :: new( inner) )
170
- }
171
-
172
- #[ inline( always) ]
173
- pub fn into_inner( self ) -> T {
174
- self . 0 . into_inner( )
175
- }
176
-
177
- #[ inline( always) ]
178
- pub fn set( & self , new_inner: T ) {
179
- self . 0 . set( new_inner) ;
180
- }
181
-
182
- #[ inline( always) ]
183
- pub fn get( & self ) -> T where T : Copy {
184
- self . 0 . get( )
185
- }
186
-
187
- #[ inline( always) ]
188
- pub fn set_mut( & mut self , new_inner: T ) {
189
- self . 0 . set( new_inner) ;
190
- }
191
-
192
- #[ inline( always) ]
193
- pub fn get_mut( & mut self ) -> T where T : Copy {
194
- self . 0 . get( )
195
- }
196
- }
197
-
198
- impl <T > LockCell <Option <T >> {
199
- #[ inline( always) ]
200
- pub fn take( & self ) -> Option <T > {
201
- unsafe { ( * self . 0 . as_ptr( ) ) . take( ) }
202
- }
203
- }
204
214
} else {
205
215
pub use std:: marker:: Send as Send ;
206
216
pub use std:: marker:: Sync as Sync ;
@@ -213,6 +223,8 @@ cfg_if! {
213
223
pub use parking_lot:: MutexGuard as LockGuard ;
214
224
pub use parking_lot:: MappedMutexGuard as MappedLockGuard ;
215
225
226
+ pub use std:: sync:: atomic:: { AtomicBool , AtomicUsize , AtomicU64 } ;
227
+
216
228
pub use std:: sync:: Arc as Lrc ;
217
229
pub use std:: sync:: Weak as Weak ;
218
230
@@ -278,47 +290,6 @@ cfg_if! {
278
290
v. erase_send_sync_owner( )
279
291
} }
280
292
}
281
-
282
- pub struct LockCell <T >( Lock <T >) ;
283
-
284
- impl <T > LockCell <T > {
285
- #[ inline( always) ]
286
- pub fn new( inner: T ) -> Self {
287
- LockCell ( Lock :: new( inner) )
288
- }
289
-
290
- #[ inline( always) ]
291
- pub fn into_inner( self ) -> T {
292
- self . 0 . into_inner( )
293
- }
294
-
295
- #[ inline( always) ]
296
- pub fn set( & self , new_inner: T ) {
297
- * self . 0 . lock( ) = new_inner;
298
- }
299
-
300
- #[ inline( always) ]
301
- pub fn get( & self ) -> T where T : Copy {
302
- * self . 0 . lock( )
303
- }
304
-
305
- #[ inline( always) ]
306
- pub fn set_mut( & mut self , new_inner: T ) {
307
- * self . 0 . get_mut( ) = new_inner;
308
- }
309
-
310
- #[ inline( always) ]
311
- pub fn get_mut( & mut self ) -> T where T : Copy {
312
- * self . 0 . get_mut( )
313
- }
314
- }
315
-
316
- impl <T > LockCell <Option <T >> {
317
- #[ inline( always) ]
318
- pub fn take( & self ) -> Option <T > {
319
- self . 0 . lock( ) . take( )
320
- }
321
- }
322
293
}
323
294
}
324
295
@@ -467,65 +438,6 @@ impl<T> Once<T> {
467
438
}
468
439
}
469
440
470
- impl < T : Copy + Debug > Debug for LockCell < T > {
471
- fn fmt ( & self , f : & mut Formatter ) -> fmt:: Result {
472
- f. debug_struct ( "LockCell" )
473
- . field ( "value" , & self . get ( ) )
474
- . finish ( )
475
- }
476
- }
477
-
478
- impl < T : Default > Default for LockCell < T > {
479
- /// Creates a `LockCell<T>`, with the `Default` value for T.
480
- #[ inline]
481
- fn default ( ) -> LockCell < T > {
482
- LockCell :: new ( Default :: default ( ) )
483
- }
484
- }
485
-
486
- impl < T : PartialEq + Copy > PartialEq for LockCell < T > {
487
- #[ inline]
488
- fn eq ( & self , other : & LockCell < T > ) -> bool {
489
- self . get ( ) == other. get ( )
490
- }
491
- }
492
-
493
- impl < T : Eq + Copy > Eq for LockCell < T > { }
494
-
495
- impl < T : PartialOrd + Copy > PartialOrd for LockCell < T > {
496
- #[ inline]
497
- fn partial_cmp ( & self , other : & LockCell < T > ) -> Option < Ordering > {
498
- self . get ( ) . partial_cmp ( & other. get ( ) )
499
- }
500
-
501
- #[ inline]
502
- fn lt ( & self , other : & LockCell < T > ) -> bool {
503
- self . get ( ) < other. get ( )
504
- }
505
-
506
- #[ inline]
507
- fn le ( & self , other : & LockCell < T > ) -> bool {
508
- self . get ( ) <= other. get ( )
509
- }
510
-
511
- #[ inline]
512
- fn gt ( & self , other : & LockCell < T > ) -> bool {
513
- self . get ( ) > other. get ( )
514
- }
515
-
516
- #[ inline]
517
- fn ge ( & self , other : & LockCell < T > ) -> bool {
518
- self . get ( ) >= other. get ( )
519
- }
520
- }
521
-
522
- impl < T : Ord + Copy > Ord for LockCell < T > {
523
- #[ inline]
524
- fn cmp ( & self , other : & LockCell < T > ) -> Ordering {
525
- self . get ( ) . cmp ( & other. get ( ) )
526
- }
527
- }
528
-
529
441
#[ derive( Debug ) ]
530
442
pub struct Lock < T > ( InnerLock < T > ) ;
531
443
0 commit comments