diff --git a/CHANGELOG.md b/CHANGELOG.md index 103b4ff..2fb5f24 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,5 @@ # Unreleased +- `StableHasher::finish` now returns a small hash instead of being fatal (#6) - Remove `StableHasher::finalize` (#4) - Import stable hasher implementation from rustc ([db8aca48129](https://github.com/rust-lang/rust/blob/db8aca48129d86b2623e3ac8cbcf2902d4d313ad/compiler/rustc_data_structures/src/)) diff --git a/src/sip128.rs b/src/sip128.rs index 724d96c..b5c5d87 100644 --- a/src/sip128.rs +++ b/src/sip128.rs @@ -377,41 +377,47 @@ impl SipHasher128 { } } - #[inline] + #[inline(always)] pub fn finish128(mut self) -> [u64; 2] { - debug_assert!(self.nbuf < BUFFER_SIZE); + SipHasher128::finish128_inner(self.nbuf, &mut self.buf, self.state, self.processed) + } - // Process full elements in buffer. - let last = self.nbuf / ELEM_SIZE; + #[inline] + fn finish128_inner( + nbuf: usize, + buf: &mut [MaybeUninit<u64>; BUFFER_WITH_SPILL_CAPACITY], + mut state: State, + processed: usize, + ) -> [u64; 2] { + debug_assert!(nbuf < BUFFER_SIZE); - // Since we're consuming self, avoid updating members for a potential - // performance gain. - let mut state = self.state; + // Process full elements in buffer. + let last = nbuf / ELEM_SIZE; for i in 0..last { - let elem = unsafe { self.buf.get_unchecked(i).assume_init().to_le() }; + let elem = unsafe { buf.get_unchecked(i).assume_init().to_le() }; state.v3 ^= elem; Sip13Rounds::c_rounds(&mut state); state.v0 ^= elem; } // Get remaining partial element. - let elem = if self.nbuf % ELEM_SIZE != 0 { + let elem = if nbuf % ELEM_SIZE != 0 { unsafe { // Ensure element is initialized by writing zero bytes. At most // `ELEM_SIZE - 1` are required given the above check. It's safe // to write this many because we have the spill and we maintain // `self.nbuf` such that this write will start before the spill. 
- let dst = (self.buf.as_mut_ptr() as *mut u8).add(self.nbuf); + let dst = (buf.as_mut_ptr() as *mut u8).add(nbuf); ptr::write_bytes(dst, 0, ELEM_SIZE - 1); - self.buf.get_unchecked(last).assume_init().to_le() + buf.get_unchecked(last).assume_init().to_le() } } else { 0 }; // Finalize the hash. - let length = self.processed.debug_strict_add(self.nbuf); + let length = processed.debug_strict_add(nbuf); let b: u64 = ((length as u64 & 0xff) << 56) | elem; state.v3 ^= b; @@ -496,7 +502,11 @@ impl Hasher for SipHasher128 { } fn finish(&self) -> u64 { - panic!("SipHasher128 cannot provide valid 64 bit hashes") + let mut buf = self.buf.clone(); + let [a, b] = SipHasher128::finish128_inner(self.nbuf, &mut buf, self.state, self.processed); + + // Combining the two halves makes sure we get a good quality hash. + a.wrapping_mul(3).wrapping_add(b).to_le() } } diff --git a/src/sip128/tests.rs b/src/sip128/tests.rs index 71ee07d..d9c7edb 100644 --- a/src/sip128/tests.rs +++ b/src/sip128/tests.rs @@ -303,3 +303,13 @@ fn test_fill_buffer() { test_fill_buffer!(i128, write_i128); test_fill_buffer!(isize, write_isize); } + +#[test] +fn test_finish() { + let mut hasher = SipHasher128::new_with_keys(0, 0); + + hasher.write_isize(0xF0); + hasher.write_isize(0xF0010); + + assert_eq!(hasher.finish(), hasher.finish()); +} diff --git a/src/stable_hasher.rs b/src/stable_hasher.rs index 3086855..8239840 100644 --- a/src/stable_hasher.rs +++ b/src/stable_hasher.rs @@ -97,16 +97,11 @@ impl fmt::Debug for StableHasher { } impl Hasher for StableHasher { - /// <div class="warning">
+ /// Returns a combined hash. /// - /// Do not use this function, it will unconditionnaly panic. - /// - /// Use instead [`StableHasher::finish`] which returns a - /// `[u64; 2]` for greater precision. - /// - /// </div>
+ /// For greater precision use instead [`StableHasher::finish`]. fn finish(&self) -> u64 { - panic!("use StableHasher::finalize instead"); + self.state.finish() } #[inline]