@@ -2,7 +2,6 @@ pub use ::alloc::sync::Arc;
 use core::ops::{Deref, DerefMut};
 use core::time::Duration;
 
-use std::collections::HashSet;
 use std::cell::RefCell;
 
 use std::sync::atomic::{AtomicUsize, Ordering};
@@ -13,8 +12,10 @@ use std::sync::RwLockReadGuard as StdRwLockReadGuard;
 use std::sync::RwLockWriteGuard as StdRwLockWriteGuard;
 use std::sync::Condvar as StdCondvar;
 
+use prelude::HashMap;
+
 #[cfg(feature = "backtrace")]
-use {prelude::{HashMap, hash_map}, backtrace::Backtrace, std::sync::Once};
+use {prelude::hash_map, backtrace::Backtrace, std::sync::Once};
 
 #[cfg(not(feature = "backtrace"))]
 struct Backtrace{}
@@ -48,7 +49,7 @@ impl Condvar {
 
 thread_local! {
 	/// We track the set of locks currently held by a reference to their `LockMetadata`
-	static LOCKS_HELD: RefCell<HashSet<Arc<LockMetadata>>> = RefCell::new(HashSet::new());
+	static LOCKS_HELD: RefCell<HashMap<u64, Arc<LockMetadata>>> = RefCell::new(HashMap::new());
 }
 static LOCK_IDX: AtomicUsize = AtomicUsize::new(0);
 
@@ -61,34 +62,13 @@ static LOCKS_INIT: Once = Once::new();
 /// when the Mutex itself was constructed.
 struct LockMetadata {
 	lock_idx: u64,
-	locked_before: StdMutex<HashSet<LockDep>>,
+	locked_before: StdMutex<HashMap<u64, LockDep>>,
 	_lock_construction_bt: Backtrace,
 }
-impl PartialEq for LockMetadata {
-	fn eq(&self, o: &LockMetadata) -> bool { self.lock_idx == o.lock_idx }
-}
-impl Eq for LockMetadata {}
-impl std::hash::Hash for LockMetadata {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock_idx); }
-}
 
 struct LockDep {
 	lock: Arc<LockMetadata>,
-	lockdep_trace: Option<Backtrace>,
-}
-impl LockDep {
-	/// Note that `Backtrace::new()` is rather expensive so we rely on the caller to fill in the
-	/// `lockdep_backtrace` field after ensuring we need it.
-	fn new_without_bt(lock: &Arc<LockMetadata>) -> Self {
-		Self { lock: Arc::clone(lock), lockdep_trace: None }
-	}
-}
-impl PartialEq for LockDep {
-	fn eq(&self, o: &LockDep) -> bool { self.lock.lock_idx == o.lock.lock_idx }
-}
-impl Eq for LockDep {}
-impl std::hash::Hash for LockDep {
-	fn hash<H: std::hash::Hasher>(&self, hasher: &mut H) { hasher.write_u64(self.lock.lock_idx); }
+	lockdep_trace: Backtrace,
 }
 
 #[cfg(feature = "backtrace")]
@@ -123,7 +103,7 @@ impl LockMetadata {
 		let lock_idx = LOCK_IDX.fetch_add(1, Ordering::Relaxed) as u64;
 
 		let res = Arc::new(LockMetadata {
-			locked_before: StdMutex::new(HashSet::new()),
+			locked_before: StdMutex::new(HashMap::new()),
 			lock_idx,
 			_lock_construction_bt: backtrace,
 		});
@@ -148,20 +128,20 @@ impl LockMetadata {
 			// For each lock which is currently locked, check that no lock's locked-before
 			// set includes the lock we're about to lock, which would imply a lockorder
 			// inversion.
-			for locked in held.borrow().iter() {
-				if read && *locked == *this {
+			for (locked_idx, _locked) in held.borrow().iter() {
+				if read && *locked_idx == this.lock_idx {
 					// Recursive read locks are explicitly allowed
 					return;
 				}
 			}
-			for locked in held.borrow().iter() {
-				if !read && *locked == *this {
+			for (locked_idx, locked) in held.borrow().iter() {
+				if !read && *locked_idx == this.lock_idx {
 					// With `feature = "backtrace"` set, we may be looking at different instances
 					// of the same lock.
 					debug_assert!(cfg!(feature = "backtrace"), "Tried to acquire a lock while it was held!");
 				}
-				for locked_dep in locked.locked_before.lock().unwrap().iter() {
-					if locked_dep.lock == *this && locked_dep.lock != *locked {
+				for (locked_dep_idx, locked_dep) in locked.locked_before.lock().unwrap().iter() {
+					if *locked_dep_idx == this.lock_idx && *locked_dep_idx != locked.lock_idx {
 						#[cfg(feature = "backtrace")]
 						panic!("Tried to violate existing lockorder.\nMutex that should be locked after the current lock was created at the following backtrace.\nNote that to get a backtrace for the lockorder violation, you should set RUST_BACKTRACE=1\nLock being taken constructed at: {} ({}):\n{:?}\nLock constructed at: {} ({})\n{:?}\n\nLock dep created at:\n{:?}\n\n",
 							get_construction_location(&this._lock_construction_bt), this.lock_idx, this._lock_construction_bt,
@@ -173,13 +153,12 @@ impl LockMetadata {
 				}
 				// Insert any already-held locks in our locked-before set.
 				let mut locked_before = this.locked_before.lock().unwrap();
-				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
-					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+				if !locked_before.contains_key(&locked.lock_idx) {
+					let lockdep = LockDep { lock: Arc::clone(locked), lockdep_trace: Backtrace::new() };
+					locked_before.insert(lockdep.lock.lock_idx, lockdep);
 				}
 			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
 			inserted = true;
 		});
 		inserted
@@ -194,14 +173,13 @@ impl LockMetadata {
 			// consider try-locks to ever generate lockorder inversions. However, if a try-lock
 			// succeeds, we do consider it to have created lockorder dependencies.
 			let mut locked_before = this.locked_before.lock().unwrap();
-			for locked in held.borrow().iter() {
-				let mut lockdep = LockDep::new_without_bt(locked);
-				if !locked_before.contains(&lockdep) {
-					lockdep.lockdep_trace = Some(Backtrace::new());
-					locked_before.insert(lockdep);
+			for (locked_idx, locked) in held.borrow().iter() {
+				if !locked_before.contains_key(locked_idx) {
+					let lockdep = LockDep { lock: Arc::clone(locked), lockdep_trace: Backtrace::new() };
+					locked_before.insert(*locked_idx, lockdep);
 				}
 			}
-			held.borrow_mut().insert(Arc::clone(this));
+			held.borrow_mut().insert(this.lock_idx, Arc::clone(this));
 		});
 	}
 }
@@ -231,7 +209,7 @@ impl<'a, T: Sized> MutexGuard<'a, T> {
 impl<T: Sized> Drop for MutexGuard<'_, T> {
 	fn drop(&mut self) {
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.mutex.deps);
+			held.borrow_mut().remove(&self.mutex.deps.lock_idx);
 		});
 	}
 }
@@ -302,7 +280,7 @@ impl<T: Sized> Drop for RwLockReadGuard<'_, T> {
 			return;
 		}
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps);
+			held.borrow_mut().remove(&self.lock.deps.lock_idx);
 		});
 	}
 }
@@ -318,7 +296,7 @@ impl<T: Sized> Deref for RwLockWriteGuard<'_, T> {
 impl<T: Sized> Drop for RwLockWriteGuard<'_, T> {
 	fn drop(&mut self) {
 		LOCKS_HELD.with(|held| {
-			held.borrow_mut().remove(&self.lock.deps.lock_idx);
 		});
 	}
 }
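
For reference, a minimal sketch of the lockorder inversion this machinery reports, written against this crate's debug_sync::Mutex (whose lock() returns a LockResult). The snippet is illustrative only, not part of the commit, and the lock names a/b are hypothetical:

	// Two unrelated locks; construction order is irrelevant, only the
	// order in which they are taken relative to each other matters.
	let a = Mutex::new(());
	let b = Mutex::new(());
	{
		let _a = a.lock().unwrap();
		let _b = b.lock().unwrap(); // b.locked_before now maps a.lock_idx -> LockDep
	}
	{
		let _b = b.lock().unwrap();
		// pre_lock finds a.lock_idx in b.locked_before while b is held, and
		// panics with "Tried to violate existing lockorder."
		let _a = a.lock().unwrap();
	}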