Skip to content

Commit

Permalink
chore: use new Winterfell release
Browse files Browse the repository at this point in the history
  • Loading branch information
Al-Kindi-0 committed Oct 28, 2024
1 parent dbb0cca commit 3e98e10
Show file tree
Hide file tree
Showing 11 changed files with 163 additions and 121 deletions.
208 changes: 110 additions & 98 deletions Cargo.lock

Large diffs are not rendered by default.

12 changes: 6 additions & 6 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ documentation = "https://docs.rs/miden-crypto/0.10.1"
categories = ["cryptography", "no-std"]
keywords = ["miden", "crypto", "hash", "merkle"]
edition = "2021"
rust-version = "1.80"
rust-version = "1.82"

[[bin]]
name = "miden-crypto"
Expand Down Expand Up @@ -52,20 +52,20 @@ num = { version = "0.4", default-features = false, features = ["alloc", "libm"]
num-complex = { version = "0.4", default-features = false }
rand = { version = "0.8", default-features = false }
rand_core = { version = "0.6", default-features = false }
rand-utils = { git = "https://github.com/facebook/winterfell", branch = "next", package = "winter-rand-utils", optional = true }
rand-utils = { version = "0.10", package = "winter-rand-utils", optional = true }
serde = { version = "1.0", default-features = false, optional = true, features = ["derive"] }
sha3 = { version = "0.10", default-features = false }
winter-crypto = { git = "https://github.com/facebook/winterfell", branch = "next", default-features = false }
winter-math = { git = "https://github.com/facebook/winterfell", branch = "next", default-features = false }
winter-utils = { git = "https://github.com/facebook/winterfell", branch = "next", default-features = false }
winter-crypto = { version = "0.10", default-features = false }
winter-math = { version = "0.10", default-features = false }
winter-utils = { version = "0.10", default-features = false }

[dev-dependencies]
criterion = { version = "0.5", features = ["html_reports"] }
getrandom = { version = "0.2", features = ["js"] }
hex = { version = "0.4", default-features = false, features = ["alloc"] }
proptest = "1.4"
rand_chacha = { version = "0.3", default-features = false }
rand-utils = { git = "https://github.com/facebook/winterfell", branch = "next", package = "winter-rand-utils" }
rand-utils = { version = "0.10", package = "winter-rand-utils" }
seq-macro = { version = "0.3" }

[build-dependencies]
Expand Down
16 changes: 16 additions & 0 deletions src/hash/blake/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ use core::{
ops::Deref,
slice::from_raw_parts,
};
use std::vec::Vec;

use super::{Digest, ElementHasher, Felt, FieldElement, Hasher};
use crate::utils::{
Expand Down Expand Up @@ -114,6 +115,11 @@ impl Hasher for Blake3_256 {
Self::hash(prepare_merge(values))
}

fn merge_many(values: &[Self::Digest]) -> Self::Digest {
let bytes: Vec<u8> = values.iter().flat_map(|v| v.as_bytes()).collect();
Blake3Digest(blake3::hash(&bytes).into())
}

fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
let mut hasher = blake3::Hasher::new();
hasher.update(&seed.0);
Expand Down Expand Up @@ -174,6 +180,11 @@ impl Hasher for Blake3_192 {
Blake3Digest(*shrink_bytes(&blake3::hash(bytes).into()))
}

fn merge_many(values: &[Self::Digest]) -> Self::Digest {
let bytes: Vec<u8> = values.iter().flat_map(|v| v.as_bytes()).collect();
Blake3Digest(*shrink_bytes(&blake3::hash(&bytes).into()))
}

fn merge(values: &[Self::Digest; 2]) -> Self::Digest {
Self::hash(prepare_merge(values))
}
Expand Down Expand Up @@ -242,6 +253,11 @@ impl Hasher for Blake3_160 {
Self::hash(prepare_merge(values))
}

fn merge_many(values: &[Self::Digest]) -> Self::Digest {
let bytes: Vec<u8> = values.iter().flat_map(|v| v.as_bytes()).collect();
Blake3Digest(*shrink_bytes(&blake3::hash(&bytes).into()))
}

fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
let mut hasher = blake3::Hasher::new();
hasher.update(&seed.0);
Expand Down
24 changes: 12 additions & 12 deletions src/hash/rescue/mds/freq.rs
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
// FFT-BASED MDS MULTIPLICATION HELPER FUNCTIONS
// ================================================================================================

/// This module contains helper functions as well as constants used to perform the vector-matrix
/// multiplication step of the Rescue prime permutation. The special form of our MDS matrix
/// i.e. being circular, allows us to reduce the vector-matrix multiplication to a Hadamard product
/// of two vectors in "frequency domain". This follows from the simple fact that every circulant
/// matrix has the columns of the discrete Fourier transform matrix as orthogonal eigenvectors.
/// The implementation also avoids the use of 3-point FFTs, and 3-point iFFTs, and substitutes that
/// with explicit expressions. It also avoids, due to the form of our matrix in the frequency
/// domain, divisions by 2 and repeated modular reductions. This is because of our explicit choice
/// of an MDS matrix that has small powers of 2 entries in frequency domain.
/// The following implementation has benefited greatly from the discussions and insights of
/// Hamish Ivey-Law and Jacqueline Nabaglo of Polygon Zero and is based on Nabaglo's Plonky2
/// implementation.
// This module contains helper functions as well as constants used to perform the vector-matrix
// multiplication step of the Rescue prime permutation. The special form of our MDS matrix
// i.e. being circular, allows us to reduce the vector-matrix multiplication to a Hadamard product
// of two vectors in "frequency domain". This follows from the simple fact that every circulant
// matrix has the columns of the discrete Fourier transform matrix as orthogonal eigenvectors.
// The implementation also avoids the use of 3-point FFTs, and 3-point iFFTs, and substitutes that
// with explicit expressions. It also avoids, due to the form of our matrix in the frequency
// domain, divisions by 2 and repeated modular reductions. This is because of our explicit choice
// of an MDS matrix that has small powers of 2 entries in frequency domain.
// The following implementation has benefited greatly from the discussions and insights of
// Hamish Ivey-Law and Jacqueline Nabaglo of Polygon Zero and is based on Nabaglo's Plonky2
// implementation.

// Rescue MDS matrix in frequency domain.
// More precisely, this is the output of the three 4-point (real) FFTs of the first column of
Expand Down
7 changes: 7 additions & 0 deletions src/hash/rescue/rpo/mod.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use alloc::vec::Vec;
use core::ops::Range;

use super::{
Expand Down Expand Up @@ -149,6 +150,12 @@ impl Hasher for Rpo256 {
RpoDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}

fn merge_many(values: &[Self::Digest]) -> Self::Digest {
let elements: Vec<Felt> =
Self::Digest::digests_as_elements(values.iter()).copied().collect();
Self::hash_elements(&elements)
}

fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
// initialize the state as follows:
// - seed is copied into the first 4 elements of the rate portion of the state.
Expand Down
7 changes: 7 additions & 0 deletions src/hash/rescue/rpx/mod.rs
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
use alloc::vec::Vec;
use core::ops::Range;

use super::{
Expand Down Expand Up @@ -155,6 +156,12 @@ impl Hasher for Rpx256 {
RpxDigest::new(state[DIGEST_RANGE].try_into().unwrap())
}

fn merge_many(values: &[Self::Digest]) -> Self::Digest {
let elements: Vec<Felt> =
Self::Digest::digests_as_elements(values.iter()).copied().collect();
Self::hash_elements(&elements)
}

fn merge_with_int(seed: Self::Digest, value: u64) -> Self::Digest {
// initialize the state as follows:
// - seed is copied into the first 4 elements of the rate portion of the state.
Expand Down
2 changes: 1 addition & 1 deletion src/merkle/merkle_tree.rs
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ pub struct InnerNodeIterator<'a> {
index: usize,
}

impl<'a> Iterator for InnerNodeIterator<'a> {
impl Iterator for InnerNodeIterator<'_> {
type Item = InnerNodeInfo;

fn next(&mut self) -> Option<Self::Item> {
Expand Down
2 changes: 1 addition & 1 deletion src/merkle/mmr/full.rs
Original file line number Diff line number Diff line change
Expand Up @@ -370,7 +370,7 @@ pub struct MmrNodes<'a> {
index: usize,
}

impl<'a> Iterator for MmrNodes<'a> {
impl Iterator for MmrNodes<'_> {
type Item = InnerNodeInfo;

fn next(&mut self) -> Option<Self::Item> {
Expand Down
2 changes: 1 addition & 1 deletion src/merkle/mmr/partial.rs
Original file line number Diff line number Diff line change
Expand Up @@ -539,7 +539,7 @@ pub struct InnerNodeIterator<'a, I: Iterator<Item = (usize, RpoDigest)>> {
seen_nodes: BTreeSet<InOrderIndex>,
}

impl<'a, I: Iterator<Item = (usize, RpoDigest)>> Iterator for InnerNodeIterator<'a, I> {
impl<I: Iterator<Item = (usize, RpoDigest)>> Iterator for InnerNodeIterator<'_, I> {
type Item = InnerNodeInfo;

fn next(&mut self) -> Option<Self::Item> {
Expand Down
2 changes: 1 addition & 1 deletion src/merkle/path.rs
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ pub struct InnerNodeIterator<'a> {
value: RpoDigest,
}

impl<'a> Iterator for InnerNodeIterator<'a> {
impl Iterator for InnerNodeIterator<'_> {
type Item = InnerNodeInfo;

fn next(&mut self) -> Option<Self::Item> {
Expand Down
2 changes: 1 addition & 1 deletion src/merkle/smt/simple/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ impl<const DEPTH: u8> SimpleSmt<DEPTH> {
/// be queried with [`MutationSet::root()`]. Once a mutation set is returned,
/// [`SimpleSmt::apply_mutations()`] can be called in order to commit these changes to the
/// Merkle tree, or [`drop()`] to discard them.

///
/// # Example
/// ```
/// # use miden_crypto::{hash::rpo::RpoDigest, Felt, Word};
Expand Down

0 comments on commit 3e98e10

Please sign in to comment.