@@ -33,13 +33,10 @@ impl<T: Internable> Interned<T> {
         //   - if not, box it up, insert it, and return a clone
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, &obj) {
+        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, &obj) {
             RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
             RawEntryMut::Vacant(vac) => Self {
-                arc: vac
-                    .insert_hashed_nocheck(hash as u64, Arc::new(obj), SharedValue::new(()))
-                    .0
-                    .clone(),
+                arc: vac.insert_hashed_nocheck(hash, Arc::new(obj), SharedValue::new(())).0.clone(),
             },
         }
     }
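
For readers skimming the hunk: the comments above describe the interning pattern being patched, a lookup-or-insert that must hold the shard lock across both steps so another thread cannot intern the same value in between. Below is a minimal, self-contained sketch of that idea using a plain Mutex<HashSet> in place of the sharded map and raw-entry API in the real code; the Interner type and intern function here are illustrative names, not types from this crate.

use std::collections::HashSet;
use std::hash::Hash;
use std::sync::{Arc, Mutex};

// Illustrative stand-in for the sharded interner map; not a type from this crate.
struct Interner<T: Eq + Hash>(Mutex<HashSet<Arc<T>>>);

impl<T: Eq + Hash> Interner<T> {
    fn intern(&self, obj: T) -> Arc<T> {
        // Hold the lock across the lookup *and* the insert so another thread
        // cannot intern the same value in between (the real code locks one shard).
        let mut set = self.0.lock().unwrap();
        if let Some(existing) = set.get(&obj) {
            // Already interned: return a clone of the existing Arc.
            return existing.clone();
        }
        // Not interned yet: box it up, insert it, and return a clone.
        let arc = Arc::new(obj);
        set.insert(arc.clone());
        arc
    }
}

Unlike the sketch, the patched code hashes the key once up front and passes that hash to from_key_hashed_nocheck / insert_hashed_nocheck, both of which take a precomputed u64 hash; the patch itself only drops the `hash as u64` casts (presumably because `hash` is already a `u64` after this change) and collapses the builder-style chain onto one line.
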
@@ -54,13 +51,10 @@ impl Interned<str> {
         //   - if not, box it up, insert it, and return a clone
         // This needs to be atomic (locking the shard) to avoid races with other thread, which could
         // insert the same object between us looking it up and inserting it.
-        match shard.raw_entry_mut().from_key_hashed_nocheck(hash as u64, s) {
+        match shard.raw_entry_mut().from_key_hashed_nocheck(hash, s) {
             RawEntryMut::Occupied(occ) => Self { arc: occ.key().clone() },
             RawEntryMut::Vacant(vac) => Self {
-                arc: vac
-                    .insert_hashed_nocheck(hash as u64, Arc::from(s), SharedValue::new(()))
-                    .0
-                    .clone(),
+                arc: vac.insert_hashed_nocheck(hash, Arc::from(s), SharedValue::new(())).0.clone(),
             },
         }
     }
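
This second hunk is the str specialization of the same routine; the only difference in the Vacant arm is `Arc::from(s)` instead of `Arc::new(obj)`, since `str` is unsized and the borrowed slice has to be copied into a fresh reference-counted allocation. A tiny standalone illustration of that distinction (not code from the patch):

use std::sync::Arc;

fn main() {
    // Sized value: Arc::new takes ownership of the value directly.
    let n: Arc<u32> = Arc::new(42);

    // Unsized str: Arc::from copies the borrowed slice into a new
    // reference-counted allocation, which is what the Vacant arm above does.
    let s: Arc<str> = Arc::from("hello");

    assert_eq!(*n, 42);
    assert_eq!(&*s, "hello");
}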